Jan 27 07:51:33 crc systemd[1]: Starting Kubernetes Kubelet... Jan 27 07:51:33 crc restorecon[4695]: Relabeled /var/lib/kubelet/config.json from system_u:object_r:unlabeled_t:s0 to system_u:object_r:container_var_lib_t:s0 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/device-plugins not reset as customized by admin to system_u:object_r:container_file_t:s0 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/device-plugins/kubelet.sock not reset as customized by admin to system_u:object_r:container_file_t:s0 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/volumes/kubernetes.io~configmap/nginx-conf/..2025_02_23_05_40_35.4114275528/nginx.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/22e96971 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/21c98286 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/0f1869e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/46889d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/5b6a5969 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/6c7921f5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4804f443 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/2a46b283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/a6b5573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4f88ee5b not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/5a4eee4b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963 Jan 27 07:51:33 
crc restorecon[4695]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/cd87c521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/38602af4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/1483b002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/0346718b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/d3ed4ada not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/3bb473a5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/8cd075a9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/00ab4760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/54a21c09 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726 Jan 27 07:51:33 crc restorecon[4695]: 
/var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/70478888 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/43802770 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/955a0edc not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/bca2d009 not reset as customized by admin to system_u:object_r:container_file_t:s0:c140,c1009 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/b295f9bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/allowlist.conf not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c574,c582 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/bc46ea27 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5731fc1b not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5e1b2a3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/943f0936 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/3f764ee4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/8695e3f9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/aed7aa86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/c64d7448 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/0ba16bd2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/207a939f not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/54aa8cdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/1f5fa595 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/bf9c8153 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/47fba4ea not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/7ae55ce9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Jan 27 
07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7906a268 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/ce43fa69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7fc7ea3a not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/d8c38b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/9ef015fb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/b9db6a41 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/b1733d79 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/afccd338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/9df0a185 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/18938cf8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/7ab4eb23 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/56930be6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_35.630010865 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c440,c975 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/0d8e3722 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/d22b2e76 not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/e036759f not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/2734c483 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/57878fe7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/3f3c2e58 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/375bec3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/7bc41e08 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 27 07:51:33 crc restorecon[4695]: 
/var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/48c7a72d not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/4b66701f not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/a5a1c202 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_40.1388695756 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/26f3df5b not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c4,c22 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/6d8fb21d not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/50e94777 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208473b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/ec9e08ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3b787c39 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208eaed5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/93aa3a2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3c697968 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/ba950ec9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/cb5cdb37 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/f2df9827 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 27 07:51:33 crc 
restorecon[4695]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/fedaa673 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/9ca2df95 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/b2d7460e not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2207853c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/241c1c29 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2d910eaf not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Jan 27 07:51:33 crc restorecon[4695]: 
/var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/c6c0f2e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871 
Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/399edc97 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8049f7cc not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/0cec5484 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/312446d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c406,c828 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8e56a35d not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589 not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/2d30ddb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/eca8053d not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/c3a25c9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c168,c522 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/b9609c22 not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/etc-hosts not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c968,c969 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/e8b0eca9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/b36a9c3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/38af7b07 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/ae821620 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/baa23338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/2c534809 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/59b29eae not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/c91a8e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/4d87494a not reset as customized by admin to system_u:object_r:container_file_t:s0:c442,c857 Jan 27 07:51:33 crc restorecon[4695]: 
/var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/1e33ca63 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/8dea7be2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d0b04a99 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d84f01e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/4109059b not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/a7258a3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/05bdf2b6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/f3261b51 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/315d045e not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/5fdcf278 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/d053f757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/c2850dc7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 
27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fcfb0b2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c7ac9b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fa0c0d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c609b6ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/2be6c296 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/89a32653 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/4eb9afeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/13af6efa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/b03f9724 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/e3d105cc not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/3aed4d83 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 27 07:51:33 
crc restorecon[4695]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/0765fa6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/2cefc627 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/3dcc6345 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/365af391 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Jan 27 07:51:33 crc restorecon[4695]: 
/var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b1130c0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/236a5913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b9432e26 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/5ddb0e3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/986dc4fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/8a23ff9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/9728ae68 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/665f31d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 27 07:51:33 crc restorecon[4695]: 
/var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/136c9b42 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/98a1575b not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 27 07:51:33 
crc restorecon[4695]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/cac69136 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/5deb77a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/2ae53400 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/e46f2326 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/dc688d3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/3497c3cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/177eb008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Jan 27 07:51:33 crc restorecon[4695]: 
/var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/af5a2afa not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/d780cb1f not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/49b0f374 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/26fbb125 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/cf14125a not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/b7f86972 not reset as customized by 
admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/e51d739c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/88ba6a69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/669a9acf not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/5cd51231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/75349ec7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/15c26839 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/45023dcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/2bb66a50 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/64d03bdd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/ab8e7ca0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/bb9be25f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem 
not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/9a0b61d3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/d471b9d2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/8cb76b8e not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/11a00840 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/ec355a92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/992f735e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d59cdbbc not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Jan 27 07:51:33 crc restorecon[4695]: 
/var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/72133ff0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/c56c834c not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d13724c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/0a498258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa471982 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fc900d92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa7d68da not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/4bacf9b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/424021b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/fc2e31a3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/f51eefac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/c8997f2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/7481f599 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Jan 27 07:51:33 crc restorecon[4695]: 
/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/fdafea19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/d0e1c571 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/ee398915 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/682bb6b8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a3e67855 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a989f289 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/915431bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/7796fdab not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/dcdb5f19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/a3aaa88c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Jan 27 07:51:33 crc restorecon[4695]: 
/var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/5508e3e6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/160585de not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/e99f8da3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/8bc85570 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/a5861c91 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/84db1135 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/9e1a6043 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/c1aba1c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/d55ccd6d not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/971cc9f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/8f2e3dcf not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/ceb35e9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/1c192745 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/5209e501 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/f83de4df not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/e7b978ac not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/c64304a1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Jan 27 07:51:33 crc restorecon[4695]: 
/var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/5384386b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/cce3e3ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/8fb75465 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/740f573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/32fd1134 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/0a861bd3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/80363026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/bfa952a8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c129,c158 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..2025_02_23_05_33_31.333075221 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/793bf43d not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/7db1bb6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/4f6a0368 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/c12c7d86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/36c4a773 not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/4c1e98ae not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/a4c8115c not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/setup/7db1802e not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver/a008a7ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-syncer/2c836bac not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-regeneration-controller/0ce62299 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-insecure-readyz/945d2457 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c97,c980 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-check-endpoints/7d5c1dd8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/index.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged not reset as customized by 
admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/bundle-v1.15.0.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/channel.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/package.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k/catalog.json not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json 
not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized 
by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/bc8d0691 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/6b76097a not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/34d1af30 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/312ba61c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/645d5dd1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/16e825f0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/4cf51fc9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/2a23d348 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/075dbd49 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/dd585ddd not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c377,c642 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/17ebd0ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c343 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/005579f4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_23_11.1287037894 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Jan 27 07:51:33 crc restorecon[4695]: 
/var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/bf5f3b9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/af276eb7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/ea28e322 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/692e6683 not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/871746a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/4eb2e958 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 27 07:51:33 crc restorecon[4695]: 
/var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/ca9b62da not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c0,c25 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/0edd6fce not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880 
not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/containers/controller-manager/89b4555f not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Jan 27 07:51:33 crc restorecon[4695]: 
/var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/655fcd71 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/0d43c002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/e68efd17 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/9acf9b65 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/5ae3ff11 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/1e59206a not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/27af16d1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c304,c1017 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/7918e729 not reset as customized by admin to system_u:object_r:container_file_t:s0:c853,c893 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/5d976d0e not reset as customized by admin to system_u:object_r:container_file_t:s0:c585,c981 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Jan 27 07:51:33 crc restorecon[4695]: 
/var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/d7f55cbb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/f0812073 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/1a56cbeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/7fdd437e not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/cdfb5652 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 27 07:51:33 crc restorecon[4695]: 
/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 27 07:51:33 crc restorecon[4695]: 
/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/fix-audit-permissions/fb93119e not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver/f1e8fc0e not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c336,c787 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver-check-endpoints/218511f3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server/serving-certs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/ca8af7b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/72cc8a75 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/6e8a3760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4c3455c0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Jan 27 07:51:33 crc restorecon[4695]: 
/var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/2278acb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4b453e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/3ec09bda not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2/cacerts.bin not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java/cacerts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl/ca-bundle.trust.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/email-ca-bundle.pem not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/objsign-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2ae6433e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fde84897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75680d2e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/openshift-service-serving-signer_1740288168.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/facfc4fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f5a969c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CFCA_EV_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9ef4a08a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ingress-operator_1740288202.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2f332aed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/248c8271.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d10a21f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ACCVRAIZ1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a94d09e5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c9a4d3b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40193066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd8c0d63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b936d1c6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CA_Disig_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4fd49c6c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM_SERVIDORES_SEGUROS.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b81b93f0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f9a69fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b30d5fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ANF_Secure_Server_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b433981b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93851c9e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9282e51c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7dd1bc4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Actalis_Authentication_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/930ac5d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f47b495.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e113c810.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5931b5bc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Commercial.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2b349938.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e48193cf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/302904dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a716d4ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Networking.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93bc0acc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/86212b19.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b727005e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbc54cab.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f51bb24c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c28a8a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9c8dfbd4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ccc52f49.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cb1c3204.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ce5e74ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd08c599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6d41d539.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb5fa911.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e35234b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8cb5ee0f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a7c655d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f8fc53da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/de6d66f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d41b5e2a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/41a3f684.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1df5a75f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_2011.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e36a6752.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b872f2b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9576d26b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/228f89db.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_ECC_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb717492.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d21b73c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b1b94ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/595e996b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_RSA_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b46e03d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/128f4b91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_3_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81f2d2b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Autoridad_de_Certificacion_Firmaprofesional_CIF_A62634068.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3bde41ac.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d16a5865.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_EC-384_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0179095f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ffa7f1eb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9482e63a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4dae3dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e359ba6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7e067d03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/95aff9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7746a63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Baltimore_CyberTrust_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/653b494a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3ad48a91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_2_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/54657681.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/82223c44.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8de2f56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d9dafe4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d96b65e2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee64a828.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40547a79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5a3f0ff8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a780d93.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/34d996fb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/eed8c118.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/89c02a45.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b1159c4c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d6325660.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4c339cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8312c4c1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_E1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8508e720.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5fdd185d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48bec511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/69105f4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b9bc432.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/32888f65.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b03dec0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/219d9499.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5acf816d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbf06781.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc99f41e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AAA_Certificate_Services.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/985c1f52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8794b4e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_BR_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7c037b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ef954a4e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_EV_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2add47b6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/90c5a3c8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0f3e76e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/53a1b57a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_EV_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5ad8a5d6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/68dd7389.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d04f354.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d6437c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/062cdee6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bd43e1dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7f3d5d1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c491639e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3513523f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/399e7759.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/feffd413.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d18e9066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/607986c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c90bc37d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1b0f7e5c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e08bfd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dd8e9d41.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed39abd0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a3418fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bc3f2570.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_High_Assurance_EV_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/244b5494.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81b9768f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4be590e0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_ECC_P384_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9846683b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/252252d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e8e7201.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_RSA4096_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d52c538d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c44cc0c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Trusted_Root_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75d1b2ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a2c66da8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ecccd8db.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust.net_Certification_Authority__2048_.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/aee5f10d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e7271e8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0e59380.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4c3982f2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b99d060.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf64f35b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0a775a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/002c0b4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cc450945.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_EC1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/106f3e4d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b3fb433b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4042bcee.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/02265526.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/455f1b52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0d69c7e1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9f727ac7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5e98733a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0cd152c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc4d6a89.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6187b673.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/FIRMAPROFESIONAL_CA_ROOT-A_WEB.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ba8887ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/068570d1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f081611a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48a195d8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GDCA_TrustAUTH_R5_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f6fa695.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab59055e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b92fd57f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GLOBALTRUST_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fa5da96b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ec40989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7719f463.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1001acf7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f013ecaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/626dceaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c559d742.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1d3472b9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9479c8c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a81e292b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4bfab552.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e071171e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/57bcb2da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_ECC_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab5346f4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5046c355.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_RSA_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/865fbdf9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da0cfd1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/85cde254.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_ECC_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbb3f32b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureSign_RootCA11.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5860aaa6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/31188b5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HiPKI_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c7f1359b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f15c80c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hongkong_Post_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/09789157.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/18856ac4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e09d511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Commercial_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cf701eeb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d06393bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Public_Sector_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/10531352.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Izenpe.com.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureTrust_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0ed035a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsec_e-Szigno_Root_CA_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8160b96c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8651083.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2c63f966.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_ECC_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d89cda1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/01419da9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_RSA_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7a5b843.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_RSA_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf53fb88.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9591a472.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3afde786.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Gold_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NAVER_Global_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3fb36b73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d39b0a2c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a89d74c2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd58d51e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7db1890.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NetLock_Arany__Class_Gold__F__tan__s__tv__ny.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/988a38cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/60afe812.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f39fc864.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5443e9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GB_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e73d606e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dfc0fe80.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b66938e9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e1eab7c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GC_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/773e07ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c899c73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d59297b8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ddcda989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_1_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/749e9e03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/52b525c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7e8dc79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a819ef2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/08063a00.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b483515.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/064e0aa9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1f58a078.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6f7454b3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7fa05551.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76faf6c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9339512a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f387163d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee37c333.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e18bfb83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e442e424.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fe8a2cd8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/23f4c490.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5cd81ad7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0c70a8d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7892ad52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SZAFIR_ROOT_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4f316efb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_RSA_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/06dc52d5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/583d0756.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0bf05006.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/88950faa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9046744a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c860d51.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_RSA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6fa5da56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/33ee480d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Secure_Global_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/63a2c897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_ECC_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bdacca6f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ff34af3f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbff3a01.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_ECC_RootCA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_C1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/406c9bb1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_C3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Services_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Silver_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/99e1b953.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/14bc7599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TUBITAK_Kamu_SM_SSL_Kok_Sertifikasi_-_Surum_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a3adc42.0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f459871d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_ECC_Root_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_RSA_Root_2023.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TeliaSonera_Root_CA_v1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telia_Root_CA_v2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f103249.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f058632f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-certificates.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9bf03295.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/98aaf404.0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1cef98f5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/073bfcc5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2923b3f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f249de83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/edcbddb5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P256_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b5697b0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ae85e5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b74d2bd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 
07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P384_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d887a5bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9aef356c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TunTrust_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd64f3fc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e13665f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Extended_Validation_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f5dc4f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da7377f6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Global_G2_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c01eb047.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/304d27c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed858448.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f30dd6ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/04f60c28.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_ECC_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fc5a8f99.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/35105088.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee532fd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/XRamp_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/706f604c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76579174.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d86cdd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/882de061.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f618aec.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a9d40e02.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e-Szigno_Root_CA_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e868b802.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/83e9984f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ePKI_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca6e4ad9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d6523ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4b718d9b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/869fbf79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/containers/registry/f8d22bdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 07:51:33 crc 
restorecon[4695]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/6e8bbfac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/54dd7996 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/a4f1bb05 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/207129da not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/c1df39e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/15b8f1cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 27 07:51:33 crc restorecon[4695]: 
/var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/77bd6913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/2382c1b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/704ce128 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/70d16fe0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/bfb95535 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/57a8e8e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/1b9d3e5e not reset as customized by admin to system_u:object_r:container_file_t:s0:c107,c917 Jan 27 07:51:33 crc restorecon[4695]: 
/var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/fddb173c not reset as customized by admin to system_u:object_r:container_file_t:s0:c202,c983 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/95d3c6c4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/bfb5fff5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/2aef40aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/c0391cad not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/1119e69d not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/660608b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/8220bd53 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/85f99d5c not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/4b0225f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/9c2a3394 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/e820b243 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/1ca52ea0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/e6988e45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Jan 27 07:51:33 crc restorecon[4695]: 
/var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/6655f00b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/98bc3986 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/08e3458a not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/2a191cb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/6c4eeefb not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/f61a549c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/24891863 not reset as customized by admin to system_u:object_r:container_file_t:s0:c37,c572 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/fbdfd89c not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/9b63b3bc not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c37,c572 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/8acde6d6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/node-driver-registrar/59ecbba3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/csi-provisioner/685d4be3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jan 27 07:51:33 crc restorecon[4695]: 
/var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/containers/route-controller-manager/feaea55e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 
07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg/catalog.json not reset as customized by admin 
to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/63709497 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/d966b7fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/f5773757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/81c9edb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/57bf57ee not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/86f5e6aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/0aabe31d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:33 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/d2af85c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/09d157d9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller not reset 
as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller/catalog.json not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by 
admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 
07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus/catalog.json not reset as customized by admin 
to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc 
restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c0fe7256 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c30319e4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/e6b1dd45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/2bb643f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/920de426 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/70fa1e87 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/a1c12a2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/9442e6c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/5b45ec72 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/3c9f3a59 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/1091c11b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/9a6821c6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/ec0c35e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/517f37e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/6214fe78 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/ba189c8b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/351e4f31 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/c0f219ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/8069f607 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/559c3d82 not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/605ad488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/148df488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/3bf6dcb4 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c133,c223 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/022a2feb not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/938c3924 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/729fe23e not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/1fd5cbd4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/a96697e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/e155ddca not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/10dd0e0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jan 27 07:51:34 crc restorecon[4695]: 
/var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/etc-hosts not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c682,c947 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/6f2c8392 not reset as customized by admin to system_u:object_r:container_file_t:s0:c267,c588 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/bd241ad9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/plugins not reset as customized by admin to system_u:object_r:container_file_t:s0 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/plugins/csi-hostpath not reset as customized by admin to system_u:object_r:container_file_t:s0 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/plugins/csi-hostpath/csi.sock not reset as customized by admin to system_u:object_r:container_file_t:s0 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/plugins/kubernetes.io not reset as customized by admin to system_u:object_r:container_file_t:s0 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/plugins/kubernetes.io/csi not reset as customized by admin to system_u:object_r:container_file_t:s0 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983 not reset as customized by admin to system_u:object_r:container_file_t:s0 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount not reset as customized by admin to system_u:object_r:container_file_t:s0 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/vol_data.json not reset as customized by admin to system_u:object_r:container_file_t:s0 Jan 27 07:51:34 crc restorecon[4695]: /var/lib/kubelet/plugins_registry not reset as customized by admin to system_u:object_r:container_file_t:s0 Jan 27 07:51:34 crc restorecon[4695]: Relabeled /var/usrlocal/bin/kubenswrapper from system_u:object_r:bin_t:s0 to system_u:object_r:kubelet_exec_t:s0 Jan 27 07:51:34 crc kubenswrapper[4787]: Flag --container-runtime-endpoint has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Jan 27 07:51:34 crc kubenswrapper[4787]: Flag --minimum-container-ttl-duration has been deprecated, Use --eviction-hard or --eviction-soft instead. Will be removed in a future version. Jan 27 07:51:34 crc kubenswrapper[4787]: Flag --volume-plugin-dir has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Jan 27 07:51:34 crc kubenswrapper[4787]: Flag --register-with-taints has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. 
Jan 27 07:51:34 crc kubenswrapper[4787]: Flag --pod-infra-container-image has been deprecated, will be removed in a future release. Image garbage collector will get sandbox image information from CRI. Jan 27 07:51:34 crc kubenswrapper[4787]: Flag --system-reserved has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Jan 27 07:51:34 crc kubenswrapper[4787]: I0127 07:51:34.816264 4787 server.go:211] "--pod-infra-container-image will not be pruned by the image garbage collector in kubelet and should also be set in the remote runtime" Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.822529 4787 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.822570 4787 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.822578 4787 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.822584 4787 feature_gate.go:330] unrecognized feature gate: PinnedImages Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.822591 4787 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.822597 4787 feature_gate.go:330] unrecognized feature gate: Example Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.822602 4787 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.822609 4787 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.822614 4787 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.822620 4787 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.822626 4787 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.822632 4787 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.822638 4787 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.822654 4787 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. 
Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.822663 4787 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.822669 4787 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.822675 4787 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.822681 4787 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.822687 4787 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.822693 4787 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.822699 4787 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.822705 4787 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.822711 4787 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.822717 4787 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.822722 4787 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.822728 4787 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.822733 4787 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.822738 4787 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.822743 4787 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.822749 4787 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.822755 4787 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.822760 4787 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.822766 4787 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.822771 4787 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.822776 4787 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.822784 4787 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. 
Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.822792 4787 feature_gate.go:330] unrecognized feature gate: GatewayAPI Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.822798 4787 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.822804 4787 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.822810 4787 feature_gate.go:330] unrecognized feature gate: SignatureStores Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.822815 4787 feature_gate.go:330] unrecognized feature gate: PlatformOperators Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.822821 4787 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.822826 4787 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.822831 4787 feature_gate.go:330] unrecognized feature gate: OVNObservability Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.822837 4787 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.822842 4787 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.822850 4787 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.822856 4787 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.822862 4787 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.822869 4787 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.822876 4787 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. 
Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.822883 4787 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.822890 4787 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.822896 4787 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.822902 4787 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.822908 4787 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.822914 4787 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.822919 4787 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.822925 4787 feature_gate.go:330] unrecognized feature gate: InsightsConfig Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.822930 4787 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.822935 4787 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.822940 4787 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.822947 4787 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.822952 4787 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.822957 4787 feature_gate.go:330] unrecognized feature gate: NewOLM Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.822962 4787 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.822968 4787 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.822974 4787 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.822979 4787 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.822984 4787 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.822989 4787 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Jan 27 07:51:34 crc kubenswrapper[4787]: I0127 07:51:34.823098 4787 flags.go:64] FLAG: --address="0.0.0.0" Jan 27 07:51:34 crc kubenswrapper[4787]: I0127 07:51:34.823111 4787 flags.go:64] FLAG: --allowed-unsafe-sysctls="[]" Jan 27 07:51:34 crc kubenswrapper[4787]: I0127 07:51:34.823122 4787 flags.go:64] FLAG: --anonymous-auth="true" Jan 27 07:51:34 crc kubenswrapper[4787]: I0127 07:51:34.823130 4787 flags.go:64] FLAG: --application-metrics-count-limit="100" Jan 27 07:51:34 crc kubenswrapper[4787]: I0127 07:51:34.823138 4787 flags.go:64] FLAG: --authentication-token-webhook="false" Jan 27 07:51:34 crc kubenswrapper[4787]: I0127 07:51:34.823145 4787 flags.go:64] FLAG: --authentication-token-webhook-cache-ttl="2m0s" Jan 27 07:51:34 crc kubenswrapper[4787]: I0127 07:51:34.823153 
4787 flags.go:64] FLAG: --authorization-mode="AlwaysAllow" Jan 27 07:51:34 crc kubenswrapper[4787]: I0127 07:51:34.823162 4787 flags.go:64] FLAG: --authorization-webhook-cache-authorized-ttl="5m0s" Jan 27 07:51:34 crc kubenswrapper[4787]: I0127 07:51:34.823169 4787 flags.go:64] FLAG: --authorization-webhook-cache-unauthorized-ttl="30s" Jan 27 07:51:34 crc kubenswrapper[4787]: I0127 07:51:34.823176 4787 flags.go:64] FLAG: --boot-id-file="/proc/sys/kernel/random/boot_id" Jan 27 07:51:34 crc kubenswrapper[4787]: I0127 07:51:34.823183 4787 flags.go:64] FLAG: --bootstrap-kubeconfig="/etc/kubernetes/kubeconfig" Jan 27 07:51:34 crc kubenswrapper[4787]: I0127 07:51:34.823190 4787 flags.go:64] FLAG: --cert-dir="/var/lib/kubelet/pki" Jan 27 07:51:34 crc kubenswrapper[4787]: I0127 07:51:34.823197 4787 flags.go:64] FLAG: --cgroup-driver="cgroupfs" Jan 27 07:51:34 crc kubenswrapper[4787]: I0127 07:51:34.823203 4787 flags.go:64] FLAG: --cgroup-root="" Jan 27 07:51:34 crc kubenswrapper[4787]: I0127 07:51:34.823210 4787 flags.go:64] FLAG: --cgroups-per-qos="true" Jan 27 07:51:34 crc kubenswrapper[4787]: I0127 07:51:34.823216 4787 flags.go:64] FLAG: --client-ca-file="" Jan 27 07:51:34 crc kubenswrapper[4787]: I0127 07:51:34.823222 4787 flags.go:64] FLAG: --cloud-config="" Jan 27 07:51:34 crc kubenswrapper[4787]: I0127 07:51:34.823228 4787 flags.go:64] FLAG: --cloud-provider="" Jan 27 07:51:34 crc kubenswrapper[4787]: I0127 07:51:34.823234 4787 flags.go:64] FLAG: --cluster-dns="[]" Jan 27 07:51:34 crc kubenswrapper[4787]: I0127 07:51:34.823240 4787 flags.go:64] FLAG: --cluster-domain="" Jan 27 07:51:34 crc kubenswrapper[4787]: I0127 07:51:34.823246 4787 flags.go:64] FLAG: --config="/etc/kubernetes/kubelet.conf" Jan 27 07:51:34 crc kubenswrapper[4787]: I0127 07:51:34.823253 4787 flags.go:64] FLAG: --config-dir="" Jan 27 07:51:34 crc kubenswrapper[4787]: I0127 07:51:34.823259 4787 flags.go:64] FLAG: --container-hints="/etc/cadvisor/container_hints.json" Jan 27 07:51:34 crc kubenswrapper[4787]: I0127 07:51:34.823265 4787 flags.go:64] FLAG: --container-log-max-files="5" Jan 27 07:51:34 crc kubenswrapper[4787]: I0127 07:51:34.823273 4787 flags.go:64] FLAG: --container-log-max-size="10Mi" Jan 27 07:51:34 crc kubenswrapper[4787]: I0127 07:51:34.823279 4787 flags.go:64] FLAG: --container-runtime-endpoint="/var/run/crio/crio.sock" Jan 27 07:51:34 crc kubenswrapper[4787]: I0127 07:51:34.823285 4787 flags.go:64] FLAG: --containerd="/run/containerd/containerd.sock" Jan 27 07:51:34 crc kubenswrapper[4787]: I0127 07:51:34.823291 4787 flags.go:64] FLAG: --containerd-namespace="k8s.io" Jan 27 07:51:34 crc kubenswrapper[4787]: I0127 07:51:34.823297 4787 flags.go:64] FLAG: --contention-profiling="false" Jan 27 07:51:34 crc kubenswrapper[4787]: I0127 07:51:34.823303 4787 flags.go:64] FLAG: --cpu-cfs-quota="true" Jan 27 07:51:34 crc kubenswrapper[4787]: I0127 07:51:34.823309 4787 flags.go:64] FLAG: --cpu-cfs-quota-period="100ms" Jan 27 07:51:34 crc kubenswrapper[4787]: I0127 07:51:34.823315 4787 flags.go:64] FLAG: --cpu-manager-policy="none" Jan 27 07:51:34 crc kubenswrapper[4787]: I0127 07:51:34.823322 4787 flags.go:64] FLAG: --cpu-manager-policy-options="" Jan 27 07:51:34 crc kubenswrapper[4787]: I0127 07:51:34.823329 4787 flags.go:64] FLAG: --cpu-manager-reconcile-period="10s" Jan 27 07:51:34 crc kubenswrapper[4787]: I0127 07:51:34.823335 4787 flags.go:64] FLAG: --enable-controller-attach-detach="true" Jan 27 07:51:34 crc kubenswrapper[4787]: I0127 07:51:34.823341 4787 flags.go:64] FLAG: --enable-debugging-handlers="true" 
Jan 27 07:51:34 crc kubenswrapper[4787]: I0127 07:51:34.823347 4787 flags.go:64] FLAG: --enable-load-reader="false" Jan 27 07:51:34 crc kubenswrapper[4787]: I0127 07:51:34.823353 4787 flags.go:64] FLAG: --enable-server="true" Jan 27 07:51:34 crc kubenswrapper[4787]: I0127 07:51:34.823359 4787 flags.go:64] FLAG: --enforce-node-allocatable="[pods]" Jan 27 07:51:34 crc kubenswrapper[4787]: I0127 07:51:34.823368 4787 flags.go:64] FLAG: --event-burst="100" Jan 27 07:51:34 crc kubenswrapper[4787]: I0127 07:51:34.823374 4787 flags.go:64] FLAG: --event-qps="50" Jan 27 07:51:34 crc kubenswrapper[4787]: I0127 07:51:34.823380 4787 flags.go:64] FLAG: --event-storage-age-limit="default=0" Jan 27 07:51:34 crc kubenswrapper[4787]: I0127 07:51:34.823387 4787 flags.go:64] FLAG: --event-storage-event-limit="default=0" Jan 27 07:51:34 crc kubenswrapper[4787]: I0127 07:51:34.823393 4787 flags.go:64] FLAG: --eviction-hard="" Jan 27 07:51:34 crc kubenswrapper[4787]: I0127 07:51:34.823400 4787 flags.go:64] FLAG: --eviction-max-pod-grace-period="0" Jan 27 07:51:34 crc kubenswrapper[4787]: I0127 07:51:34.823406 4787 flags.go:64] FLAG: --eviction-minimum-reclaim="" Jan 27 07:51:34 crc kubenswrapper[4787]: I0127 07:51:34.823412 4787 flags.go:64] FLAG: --eviction-pressure-transition-period="5m0s" Jan 27 07:51:34 crc kubenswrapper[4787]: I0127 07:51:34.823419 4787 flags.go:64] FLAG: --eviction-soft="" Jan 27 07:51:34 crc kubenswrapper[4787]: I0127 07:51:34.823425 4787 flags.go:64] FLAG: --eviction-soft-grace-period="" Jan 27 07:51:34 crc kubenswrapper[4787]: I0127 07:51:34.823431 4787 flags.go:64] FLAG: --exit-on-lock-contention="false" Jan 27 07:51:34 crc kubenswrapper[4787]: I0127 07:51:34.823436 4787 flags.go:64] FLAG: --experimental-allocatable-ignore-eviction="false" Jan 27 07:51:34 crc kubenswrapper[4787]: I0127 07:51:34.823442 4787 flags.go:64] FLAG: --experimental-mounter-path="" Jan 27 07:51:34 crc kubenswrapper[4787]: I0127 07:51:34.823448 4787 flags.go:64] FLAG: --fail-cgroupv1="false" Jan 27 07:51:34 crc kubenswrapper[4787]: I0127 07:51:34.823454 4787 flags.go:64] FLAG: --fail-swap-on="true" Jan 27 07:51:34 crc kubenswrapper[4787]: I0127 07:51:34.823460 4787 flags.go:64] FLAG: --feature-gates="" Jan 27 07:51:34 crc kubenswrapper[4787]: I0127 07:51:34.823468 4787 flags.go:64] FLAG: --file-check-frequency="20s" Jan 27 07:51:34 crc kubenswrapper[4787]: I0127 07:51:34.823474 4787 flags.go:64] FLAG: --global-housekeeping-interval="1m0s" Jan 27 07:51:34 crc kubenswrapper[4787]: I0127 07:51:34.823480 4787 flags.go:64] FLAG: --hairpin-mode="promiscuous-bridge" Jan 27 07:51:34 crc kubenswrapper[4787]: I0127 07:51:34.823486 4787 flags.go:64] FLAG: --healthz-bind-address="127.0.0.1" Jan 27 07:51:34 crc kubenswrapper[4787]: I0127 07:51:34.823493 4787 flags.go:64] FLAG: --healthz-port="10248" Jan 27 07:51:34 crc kubenswrapper[4787]: I0127 07:51:34.823499 4787 flags.go:64] FLAG: --help="false" Jan 27 07:51:34 crc kubenswrapper[4787]: I0127 07:51:34.823505 4787 flags.go:64] FLAG: --hostname-override="" Jan 27 07:51:34 crc kubenswrapper[4787]: I0127 07:51:34.823511 4787 flags.go:64] FLAG: --housekeeping-interval="10s" Jan 27 07:51:34 crc kubenswrapper[4787]: I0127 07:51:34.823517 4787 flags.go:64] FLAG: --http-check-frequency="20s" Jan 27 07:51:34 crc kubenswrapper[4787]: I0127 07:51:34.823523 4787 flags.go:64] FLAG: --image-credential-provider-bin-dir="" Jan 27 07:51:34 crc kubenswrapper[4787]: I0127 07:51:34.823529 4787 flags.go:64] FLAG: --image-credential-provider-config="" Jan 27 07:51:34 crc kubenswrapper[4787]: 
I0127 07:51:34.823535 4787 flags.go:64] FLAG: --image-gc-high-threshold="85" Jan 27 07:51:34 crc kubenswrapper[4787]: I0127 07:51:34.823541 4787 flags.go:64] FLAG: --image-gc-low-threshold="80" Jan 27 07:51:34 crc kubenswrapper[4787]: I0127 07:51:34.823564 4787 flags.go:64] FLAG: --image-service-endpoint="" Jan 27 07:51:34 crc kubenswrapper[4787]: I0127 07:51:34.823571 4787 flags.go:64] FLAG: --kernel-memcg-notification="false" Jan 27 07:51:34 crc kubenswrapper[4787]: I0127 07:51:34.823577 4787 flags.go:64] FLAG: --kube-api-burst="100" Jan 27 07:51:34 crc kubenswrapper[4787]: I0127 07:51:34.823585 4787 flags.go:64] FLAG: --kube-api-content-type="application/vnd.kubernetes.protobuf" Jan 27 07:51:34 crc kubenswrapper[4787]: I0127 07:51:34.823592 4787 flags.go:64] FLAG: --kube-api-qps="50" Jan 27 07:51:34 crc kubenswrapper[4787]: I0127 07:51:34.823598 4787 flags.go:64] FLAG: --kube-reserved="" Jan 27 07:51:34 crc kubenswrapper[4787]: I0127 07:51:34.823604 4787 flags.go:64] FLAG: --kube-reserved-cgroup="" Jan 27 07:51:34 crc kubenswrapper[4787]: I0127 07:51:34.823610 4787 flags.go:64] FLAG: --kubeconfig="/var/lib/kubelet/kubeconfig" Jan 27 07:51:34 crc kubenswrapper[4787]: I0127 07:51:34.823616 4787 flags.go:64] FLAG: --kubelet-cgroups="" Jan 27 07:51:34 crc kubenswrapper[4787]: I0127 07:51:34.823622 4787 flags.go:64] FLAG: --local-storage-capacity-isolation="true" Jan 27 07:51:34 crc kubenswrapper[4787]: I0127 07:51:34.823627 4787 flags.go:64] FLAG: --lock-file="" Jan 27 07:51:34 crc kubenswrapper[4787]: I0127 07:51:34.823633 4787 flags.go:64] FLAG: --log-cadvisor-usage="false" Jan 27 07:51:34 crc kubenswrapper[4787]: I0127 07:51:34.823639 4787 flags.go:64] FLAG: --log-flush-frequency="5s" Jan 27 07:51:34 crc kubenswrapper[4787]: I0127 07:51:34.823645 4787 flags.go:64] FLAG: --log-json-info-buffer-size="0" Jan 27 07:51:34 crc kubenswrapper[4787]: I0127 07:51:34.823655 4787 flags.go:64] FLAG: --log-json-split-stream="false" Jan 27 07:51:34 crc kubenswrapper[4787]: I0127 07:51:34.823662 4787 flags.go:64] FLAG: --log-text-info-buffer-size="0" Jan 27 07:51:34 crc kubenswrapper[4787]: I0127 07:51:34.823676 4787 flags.go:64] FLAG: --log-text-split-stream="false" Jan 27 07:51:34 crc kubenswrapper[4787]: I0127 07:51:34.823683 4787 flags.go:64] FLAG: --logging-format="text" Jan 27 07:51:34 crc kubenswrapper[4787]: I0127 07:51:34.823691 4787 flags.go:64] FLAG: --machine-id-file="/etc/machine-id,/var/lib/dbus/machine-id" Jan 27 07:51:34 crc kubenswrapper[4787]: I0127 07:51:34.823699 4787 flags.go:64] FLAG: --make-iptables-util-chains="true" Jan 27 07:51:34 crc kubenswrapper[4787]: I0127 07:51:34.823706 4787 flags.go:64] FLAG: --manifest-url="" Jan 27 07:51:34 crc kubenswrapper[4787]: I0127 07:51:34.823713 4787 flags.go:64] FLAG: --manifest-url-header="" Jan 27 07:51:34 crc kubenswrapper[4787]: I0127 07:51:34.823723 4787 flags.go:64] FLAG: --max-housekeeping-interval="15s" Jan 27 07:51:34 crc kubenswrapper[4787]: I0127 07:51:34.823729 4787 flags.go:64] FLAG: --max-open-files="1000000" Jan 27 07:51:34 crc kubenswrapper[4787]: I0127 07:51:34.823737 4787 flags.go:64] FLAG: --max-pods="110" Jan 27 07:51:34 crc kubenswrapper[4787]: I0127 07:51:34.823743 4787 flags.go:64] FLAG: --maximum-dead-containers="-1" Jan 27 07:51:34 crc kubenswrapper[4787]: I0127 07:51:34.823749 4787 flags.go:64] FLAG: --maximum-dead-containers-per-container="1" Jan 27 07:51:34 crc kubenswrapper[4787]: I0127 07:51:34.823755 4787 flags.go:64] FLAG: --memory-manager-policy="None" Jan 27 07:51:34 crc kubenswrapper[4787]: I0127 
07:51:34.823761 4787 flags.go:64] FLAG: --minimum-container-ttl-duration="6m0s" Jan 27 07:51:34 crc kubenswrapper[4787]: I0127 07:51:34.823767 4787 flags.go:64] FLAG: --minimum-image-ttl-duration="2m0s" Jan 27 07:51:34 crc kubenswrapper[4787]: I0127 07:51:34.823773 4787 flags.go:64] FLAG: --node-ip="192.168.126.11" Jan 27 07:51:34 crc kubenswrapper[4787]: I0127 07:51:34.823780 4787 flags.go:64] FLAG: --node-labels="node-role.kubernetes.io/control-plane=,node-role.kubernetes.io/master=,node.openshift.io/os_id=rhcos" Jan 27 07:51:34 crc kubenswrapper[4787]: I0127 07:51:34.823793 4787 flags.go:64] FLAG: --node-status-max-images="50" Jan 27 07:51:34 crc kubenswrapper[4787]: I0127 07:51:34.823799 4787 flags.go:64] FLAG: --node-status-update-frequency="10s" Jan 27 07:51:34 crc kubenswrapper[4787]: I0127 07:51:34.823805 4787 flags.go:64] FLAG: --oom-score-adj="-999" Jan 27 07:51:34 crc kubenswrapper[4787]: I0127 07:51:34.823812 4787 flags.go:64] FLAG: --pod-cidr="" Jan 27 07:51:34 crc kubenswrapper[4787]: I0127 07:51:34.823819 4787 flags.go:64] FLAG: --pod-infra-container-image="quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:33549946e22a9ffa738fd94b1345f90921bc8f92fa6137784cb33c77ad806f9d" Jan 27 07:51:34 crc kubenswrapper[4787]: I0127 07:51:34.823827 4787 flags.go:64] FLAG: --pod-manifest-path="" Jan 27 07:51:34 crc kubenswrapper[4787]: I0127 07:51:34.823833 4787 flags.go:64] FLAG: --pod-max-pids="-1" Jan 27 07:51:34 crc kubenswrapper[4787]: I0127 07:51:34.823839 4787 flags.go:64] FLAG: --pods-per-core="0" Jan 27 07:51:34 crc kubenswrapper[4787]: I0127 07:51:34.823845 4787 flags.go:64] FLAG: --port="10250" Jan 27 07:51:34 crc kubenswrapper[4787]: I0127 07:51:34.823852 4787 flags.go:64] FLAG: --protect-kernel-defaults="false" Jan 27 07:51:34 crc kubenswrapper[4787]: I0127 07:51:34.823858 4787 flags.go:64] FLAG: --provider-id="" Jan 27 07:51:34 crc kubenswrapper[4787]: I0127 07:51:34.823863 4787 flags.go:64] FLAG: --qos-reserved="" Jan 27 07:51:34 crc kubenswrapper[4787]: I0127 07:51:34.823869 4787 flags.go:64] FLAG: --read-only-port="10255" Jan 27 07:51:34 crc kubenswrapper[4787]: I0127 07:51:34.823876 4787 flags.go:64] FLAG: --register-node="true" Jan 27 07:51:34 crc kubenswrapper[4787]: I0127 07:51:34.823882 4787 flags.go:64] FLAG: --register-schedulable="true" Jan 27 07:51:34 crc kubenswrapper[4787]: I0127 07:51:34.823888 4787 flags.go:64] FLAG: --register-with-taints="node-role.kubernetes.io/master=:NoSchedule" Jan 27 07:51:34 crc kubenswrapper[4787]: I0127 07:51:34.823901 4787 flags.go:64] FLAG: --registry-burst="10" Jan 27 07:51:34 crc kubenswrapper[4787]: I0127 07:51:34.823907 4787 flags.go:64] FLAG: --registry-qps="5" Jan 27 07:51:34 crc kubenswrapper[4787]: I0127 07:51:34.823913 4787 flags.go:64] FLAG: --reserved-cpus="" Jan 27 07:51:34 crc kubenswrapper[4787]: I0127 07:51:34.823918 4787 flags.go:64] FLAG: --reserved-memory="" Jan 27 07:51:34 crc kubenswrapper[4787]: I0127 07:51:34.823926 4787 flags.go:64] FLAG: --resolv-conf="/etc/resolv.conf" Jan 27 07:51:34 crc kubenswrapper[4787]: I0127 07:51:34.823932 4787 flags.go:64] FLAG: --root-dir="/var/lib/kubelet" Jan 27 07:51:34 crc kubenswrapper[4787]: I0127 07:51:34.823938 4787 flags.go:64] FLAG: --rotate-certificates="false" Jan 27 07:51:34 crc kubenswrapper[4787]: I0127 07:51:34.823944 4787 flags.go:64] FLAG: --rotate-server-certificates="false" Jan 27 07:51:34 crc kubenswrapper[4787]: I0127 07:51:34.823950 4787 flags.go:64] FLAG: --runonce="false" Jan 27 07:51:34 crc kubenswrapper[4787]: I0127 07:51:34.823956 4787 flags.go:64] 
FLAG: --runtime-cgroups="/system.slice/crio.service" Jan 27 07:51:34 crc kubenswrapper[4787]: I0127 07:51:34.823963 4787 flags.go:64] FLAG: --runtime-request-timeout="2m0s" Jan 27 07:51:34 crc kubenswrapper[4787]: I0127 07:51:34.823969 4787 flags.go:64] FLAG: --seccomp-default="false" Jan 27 07:51:34 crc kubenswrapper[4787]: I0127 07:51:34.823974 4787 flags.go:64] FLAG: --serialize-image-pulls="true" Jan 27 07:51:34 crc kubenswrapper[4787]: I0127 07:51:34.823980 4787 flags.go:64] FLAG: --storage-driver-buffer-duration="1m0s" Jan 27 07:51:34 crc kubenswrapper[4787]: I0127 07:51:34.823987 4787 flags.go:64] FLAG: --storage-driver-db="cadvisor" Jan 27 07:51:34 crc kubenswrapper[4787]: I0127 07:51:34.823993 4787 flags.go:64] FLAG: --storage-driver-host="localhost:8086" Jan 27 07:51:34 crc kubenswrapper[4787]: I0127 07:51:34.823999 4787 flags.go:64] FLAG: --storage-driver-password="root" Jan 27 07:51:34 crc kubenswrapper[4787]: I0127 07:51:34.824006 4787 flags.go:64] FLAG: --storage-driver-secure="false" Jan 27 07:51:34 crc kubenswrapper[4787]: I0127 07:51:34.824012 4787 flags.go:64] FLAG: --storage-driver-table="stats" Jan 27 07:51:34 crc kubenswrapper[4787]: I0127 07:51:34.824019 4787 flags.go:64] FLAG: --storage-driver-user="root" Jan 27 07:51:34 crc kubenswrapper[4787]: I0127 07:51:34.824026 4787 flags.go:64] FLAG: --streaming-connection-idle-timeout="4h0m0s" Jan 27 07:51:34 crc kubenswrapper[4787]: I0127 07:51:34.824033 4787 flags.go:64] FLAG: --sync-frequency="1m0s" Jan 27 07:51:34 crc kubenswrapper[4787]: I0127 07:51:34.824040 4787 flags.go:64] FLAG: --system-cgroups="" Jan 27 07:51:34 crc kubenswrapper[4787]: I0127 07:51:34.824046 4787 flags.go:64] FLAG: --system-reserved="cpu=200m,ephemeral-storage=350Mi,memory=350Mi" Jan 27 07:51:34 crc kubenswrapper[4787]: I0127 07:51:34.824057 4787 flags.go:64] FLAG: --system-reserved-cgroup="" Jan 27 07:51:34 crc kubenswrapper[4787]: I0127 07:51:34.824063 4787 flags.go:64] FLAG: --tls-cert-file="" Jan 27 07:51:34 crc kubenswrapper[4787]: I0127 07:51:34.824069 4787 flags.go:64] FLAG: --tls-cipher-suites="[]" Jan 27 07:51:34 crc kubenswrapper[4787]: I0127 07:51:34.824076 4787 flags.go:64] FLAG: --tls-min-version="" Jan 27 07:51:34 crc kubenswrapper[4787]: I0127 07:51:34.824082 4787 flags.go:64] FLAG: --tls-private-key-file="" Jan 27 07:51:34 crc kubenswrapper[4787]: I0127 07:51:34.824087 4787 flags.go:64] FLAG: --topology-manager-policy="none" Jan 27 07:51:34 crc kubenswrapper[4787]: I0127 07:51:34.824094 4787 flags.go:64] FLAG: --topology-manager-policy-options="" Jan 27 07:51:34 crc kubenswrapper[4787]: I0127 07:51:34.824100 4787 flags.go:64] FLAG: --topology-manager-scope="container" Jan 27 07:51:34 crc kubenswrapper[4787]: I0127 07:51:34.824108 4787 flags.go:64] FLAG: --v="2" Jan 27 07:51:34 crc kubenswrapper[4787]: I0127 07:51:34.824116 4787 flags.go:64] FLAG: --version="false" Jan 27 07:51:34 crc kubenswrapper[4787]: I0127 07:51:34.824124 4787 flags.go:64] FLAG: --vmodule="" Jan 27 07:51:34 crc kubenswrapper[4787]: I0127 07:51:34.824131 4787 flags.go:64] FLAG: --volume-plugin-dir="/etc/kubernetes/kubelet-plugins/volume/exec" Jan 27 07:51:34 crc kubenswrapper[4787]: I0127 07:51:34.824137 4787 flags.go:64] FLAG: --volume-stats-agg-period="1m0s" Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.824283 4787 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.824290 4787 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Jan 27 07:51:34 crc kubenswrapper[4787]: 
W0127 07:51:34.824296 4787 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.824301 4787 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.824307 4787 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.824313 4787 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.824319 4787 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.824325 4787 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.824330 4787 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.824336 4787 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.824341 4787 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.824346 4787 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.824351 4787 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.824356 4787 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.824362 4787 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.824368 4787 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.824373 4787 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.824378 4787 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.824383 4787 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.824389 4787 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.824395 4787 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. 
Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.824402 4787 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.824408 4787 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.824413 4787 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.824419 4787 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.824424 4787 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.824429 4787 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.824434 4787 feature_gate.go:330] unrecognized feature gate: NewOLM Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.824439 4787 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.824444 4787 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.824450 4787 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.824455 4787 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.824460 4787 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.824465 4787 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.824471 4787 feature_gate.go:330] unrecognized feature gate: GatewayAPI Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.824477 4787 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.824483 4787 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.824489 4787 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.824495 4787 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.824500 4787 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.824507 4787 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.824514 4787 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. 
Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.824521 4787 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.824527 4787 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.824533 4787 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.824539 4787 feature_gate.go:330] unrecognized feature gate: SignatureStores Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.824544 4787 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.824568 4787 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.824574 4787 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.824579 4787 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.824585 4787 feature_gate.go:330] unrecognized feature gate: OVNObservability Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.824590 4787 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.824597 4787 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.824604 4787 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.824609 4787 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.824616 4787 feature_gate.go:330] unrecognized feature gate: InsightsConfig Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.824622 4787 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.824628 4787 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.824634 4787 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.824641 4787 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. 
Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.824648 4787 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.824655 4787 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.824661 4787 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.824667 4787 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.824672 4787 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.824678 4787 feature_gate.go:330] unrecognized feature gate: PlatformOperators Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.824683 4787 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.824689 4787 feature_gate.go:330] unrecognized feature gate: PinnedImages Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.824695 4787 feature_gate.go:330] unrecognized feature gate: Example Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.824700 4787 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.824705 4787 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Jan 27 07:51:34 crc kubenswrapper[4787]: I0127 07:51:34.824745 4787 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]} Jan 27 07:51:34 crc kubenswrapper[4787]: I0127 07:51:34.836360 4787 server.go:491] "Kubelet version" kubeletVersion="v1.31.5" Jan 27 07:51:34 crc kubenswrapper[4787]: I0127 07:51:34.836423 4787 server.go:493] "Golang settings" GOGC="" GOMAXPROCS="" GOTRACEBACK="" Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.836585 4787 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.836610 4787 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.836620 4787 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.836631 4787 feature_gate.go:330] unrecognized feature gate: PlatformOperators Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.836640 4787 feature_gate.go:330] unrecognized feature gate: InsightsConfig Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.836650 4787 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.836658 4787 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.836667 4787 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.836676 4787 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS 
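[editor's note] The long runs of "unrecognized feature gate" warnings are the kubelet skipping OpenShift-level gates that only the cluster operators understand; the gates it actually applies are the ones in the feature_gate.go:386 summary above (CloudDualStackNodeIPs, DisableKubeletCloudCredentialProviders, KMSv1, ValidatingAdmissionPolicy, and so on). A hedged way to cross-check where that list comes from and what the kubelet ended up with, assuming cluster-admin access and that the node is named crc as in these messages:

```bash
# Cluster-side source of the gate set that gets handed down to the kubelet:
oc get featuregate cluster -o yaml

# Effective kubelet configuration (including featureGates) via the kubelet's
# /configz debug endpoint, proxied through the API server; 'crc' is the node
# name assumed from this log, and jq is only for pretty-printing.
oc get --raw "/api/v1/nodes/crc/proxy/configz" | jq .
```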
Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.836684 4787 feature_gate.go:330] unrecognized feature gate: SignatureStores Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.836726 4787 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.836737 4787 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.836745 4787 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.836753 4787 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.836765 4787 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.836778 4787 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.836787 4787 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.836796 4787 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.836807 4787 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.836818 4787 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.836830 4787 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.836844 4787 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. 
Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.836861 4787 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.836871 4787 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.836881 4787 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.836890 4787 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.836899 4787 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.836908 4787 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.836916 4787 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.836924 4787 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.836932 4787 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.836941 4787 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.836950 4787 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.836958 4787 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.836969 4787 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.836977 4787 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.836986 4787 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.836995 4787 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.837004 4787 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.837012 4787 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.837021 4787 feature_gate.go:330] unrecognized feature gate: OVNObservability Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.837029 4787 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.837038 4787 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.837046 4787 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.837054 4787 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.837063 4787 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.837072 4787 feature_gate.go:330] unrecognized feature gate: NewOLM Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.837080 4787 feature_gate.go:330] unrecognized feature gate: 
MultiArchInstallAzure Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.837089 4787 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.837097 4787 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.837105 4787 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.837114 4787 feature_gate.go:330] unrecognized feature gate: GatewayAPI Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.837125 4787 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.837135 4787 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.837144 4787 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.837154 4787 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.837163 4787 feature_gate.go:330] unrecognized feature gate: Example Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.837172 4787 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.837180 4787 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.837191 4787 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.837202 4787 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.837212 4787 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.837222 4787 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.837232 4787 feature_gate.go:330] unrecognized feature gate: PinnedImages Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.837241 4787 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.837250 4787 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.837258 4787 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.837269 4787 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.837277 4787 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.837286 4787 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.837295 4787 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Jan 27 07:51:34 crc kubenswrapper[4787]: I0127 07:51:34.837310 4787 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true 
MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]} Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.837545 4787 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.837593 4787 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.837602 4787 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.837611 4787 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.837620 4787 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.837629 4787 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.837638 4787 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.837646 4787 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.837655 4787 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.837663 4787 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.837672 4787 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.837685 4787 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. 
Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.837696 4787 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.837705 4787 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.837714 4787 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.837724 4787 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.837733 4787 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.837742 4787 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.837750 4787 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.837759 4787 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.837768 4787 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.837776 4787 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.837785 4787 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.837793 4787 feature_gate.go:330] unrecognized feature gate: InsightsConfig Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.837801 4787 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.837810 4787 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.837818 4787 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.837826 4787 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.837835 4787 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.837843 4787 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.837852 4787 feature_gate.go:330] unrecognized feature gate: NewOLM Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.837861 4787 feature_gate.go:330] unrecognized feature gate: PinnedImages Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.837869 4787 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.837877 4787 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.837887 4787 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.837896 4787 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.837905 4787 feature_gate.go:330] unrecognized feature gate: OVNObservability Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.837914 4787 feature_gate.go:330] unrecognized feature gate: 
NodeDisruptionPolicy Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.837922 4787 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.837930 4787 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.837938 4787 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.837947 4787 feature_gate.go:330] unrecognized feature gate: Example Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.837956 4787 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.837965 4787 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.837973 4787 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.837981 4787 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.837990 4787 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.837999 4787 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.838007 4787 feature_gate.go:330] unrecognized feature gate: PlatformOperators Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.838016 4787 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.838025 4787 feature_gate.go:330] unrecognized feature gate: SignatureStores Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.838034 4787 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.838043 4787 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.838051 4787 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.838060 4787 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.838068 4787 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.838076 4787 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.838085 4787 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.838093 4787 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.838104 4787 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. 
Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.838115 4787 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.838125 4787 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.838133 4787 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.838143 4787 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.838154 4787 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.838165 4787 feature_gate.go:330] unrecognized feature gate: GatewayAPI Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.838174 4787 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.838189 4787 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.838204 4787 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.838216 4787 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Jan 27 07:51:34 crc kubenswrapper[4787]: W0127 07:51:34.838227 4787 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Jan 27 07:51:34 crc kubenswrapper[4787]: I0127 07:51:34.838245 4787 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]} Jan 27 07:51:34 crc kubenswrapper[4787]: I0127 07:51:34.839599 4787 server.go:940] "Client rotation is on, will bootstrap in background" Jan 27 07:51:34 crc kubenswrapper[4787]: I0127 07:51:34.849231 4787 bootstrap.go:85] "Current kubeconfig file contents are still valid, no bootstrap necessary" Jan 27 07:51:34 crc kubenswrapper[4787]: I0127 07:51:34.849372 4787 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-client-current.pem". 
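[editor's note] The entries above show client certificate rotation is on, the existing bootstrap kubeconfig is still valid, and the client pair is loaded from /var/lib/kubelet/pki/kubelet-client-current.pem. If rotation ever needs debugging (for example when the kubelet cannot yet reach the API server to post a CertificateSigningRequest), a couple of read-only checks usually suffice; a sketch assuming cluster-admin access on this node:

```bash
# Expiry and subject of the kubelet client certificate the log says it loaded:
sudo openssl x509 -in /var/lib/kubelet/pki/kubelet-client-current.pem \
  -noout -enddate -subject

# Any CSRs the kubelet has managed to submit, and their approval state:
oc get csr --sort-by=.metadata.creationTimestamp
```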
Jan 27 07:51:34 crc kubenswrapper[4787]: I0127 07:51:34.851165 4787 server.go:997] "Starting client certificate rotation" Jan 27 07:51:34 crc kubenswrapper[4787]: I0127 07:51:34.851221 4787 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate rotation is enabled Jan 27 07:51:34 crc kubenswrapper[4787]: I0127 07:51:34.851386 4787 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate expiration is 2026-02-24 05:52:08 +0000 UTC, rotation deadline is 2025-12-14 13:22:23.858521109 +0000 UTC Jan 27 07:51:34 crc kubenswrapper[4787]: I0127 07:51:34.851477 4787 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Rotating certificates Jan 27 07:51:34 crc kubenswrapper[4787]: I0127 07:51:34.891491 4787 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Jan 27 07:51:34 crc kubenswrapper[4787]: I0127 07:51:34.895299 4787 dynamic_cafile_content.go:161] "Starting controller" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Jan 27 07:51:34 crc kubenswrapper[4787]: E0127 07:51:34.897642 4787 certificate_manager.go:562] "Unhandled Error" err="kubernetes.io/kube-apiserver-client-kubelet: Failed while requesting a signed certificate from the control plane: cannot create certificate signing request: Post \"https://api-int.crc.testing:6443/apis/certificates.k8s.io/v1/certificatesigningrequests\": dial tcp 38.102.83.181:6443: connect: connection refused" logger="UnhandledError" Jan 27 07:51:34 crc kubenswrapper[4787]: I0127 07:51:34.914340 4787 log.go:25] "Validated CRI v1 runtime API" Jan 27 07:51:34 crc kubenswrapper[4787]: I0127 07:51:34.958857 4787 log.go:25] "Validated CRI v1 image API" Jan 27 07:51:34 crc kubenswrapper[4787]: I0127 07:51:34.961664 4787 server.go:1437] "Using cgroup driver setting received from the CRI runtime" cgroupDriver="systemd" Jan 27 07:51:34 crc kubenswrapper[4787]: I0127 07:51:34.967164 4787 fs.go:133] Filesystem UUIDs: map[0b076daa-c26a-46d2-b3a6-72a8dbc6e257:/dev/vda4 2026-01-27-07-46-44-00:/dev/sr0 7B77-95E7:/dev/vda2 de0497b0-db1b-465a-b278-03db02455c71:/dev/vda3] Jan 27 07:51:34 crc kubenswrapper[4787]: I0127 07:51:34.967212 4787 fs.go:134] Filesystem partitions: map[/dev/shm:{mountpoint:/dev/shm major:0 minor:22 fsType:tmpfs blockSize:0} /dev/vda3:{mountpoint:/boot major:252 minor:3 fsType:ext4 blockSize:0} /dev/vda4:{mountpoint:/var major:252 minor:4 fsType:xfs blockSize:0} /run:{mountpoint:/run major:0 minor:24 fsType:tmpfs blockSize:0} /run/user/1000:{mountpoint:/run/user/1000 major:0 minor:41 fsType:tmpfs blockSize:0} /tmp:{mountpoint:/tmp major:0 minor:30 fsType:tmpfs blockSize:0} /var/lib/etcd:{mountpoint:/var/lib/etcd major:0 minor:42 fsType:tmpfs blockSize:0}] Jan 27 07:51:34 crc kubenswrapper[4787]: I0127 07:51:34.990973 4787 manager.go:217] Machine: {Timestamp:2026-01-27 07:51:34.984763526 +0000 UTC m=+0.637119028 CPUVendorID:AuthenticAMD NumCores:12 NumPhysicalCores:1 NumSockets:12 CpuFrequency:2799998 MemoryCapacity:33654128640 SwapCapacity:0 MemoryByType:map[] NVMInfo:{MemoryModeCapacity:0 AppDirectModeCapacity:0 AvgPowerBudget:0} HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] MachineID:21801e6708c44f15b81395eb736a7cec SystemUUID:553a2493-323d-43ed-bfcf-44c5a8746c19 BootID:55ded10c-ad4c-443d-a571-c7a8f4a50b03 Filesystems:[{Device:/dev/shm DeviceMajor:0 DeviceMinor:22 Capacity:16827064320 Type:vfs Inodes:4108170 HasInodes:true} {Device:/run DeviceMajor:0 DeviceMinor:24 
Capacity:6730825728 Type:vfs Inodes:819200 HasInodes:true} {Device:/dev/vda4 DeviceMajor:252 DeviceMinor:4 Capacity:85292941312 Type:vfs Inodes:41679680 HasInodes:true} {Device:/tmp DeviceMajor:0 DeviceMinor:30 Capacity:16827064320 Type:vfs Inodes:1048576 HasInodes:true} {Device:/dev/vda3 DeviceMajor:252 DeviceMinor:3 Capacity:366869504 Type:vfs Inodes:98304 HasInodes:true} {Device:/run/user/1000 DeviceMajor:0 DeviceMinor:41 Capacity:3365412864 Type:vfs Inodes:821634 HasInodes:true} {Device:/var/lib/etcd DeviceMajor:0 DeviceMinor:42 Capacity:1073741824 Type:vfs Inodes:4108170 HasInodes:true}] DiskMap:map[252:0:{Name:vda Major:252 Minor:0 Size:214748364800 Scheduler:none}] NetworkDevices:[{Name:br-ex MacAddress:fa:16:3e:56:54:61 Speed:0 Mtu:1500} {Name:br-int MacAddress:d6:39:55:2e:22:71 Speed:0 Mtu:1400} {Name:ens3 MacAddress:fa:16:3e:56:54:61 Speed:-1 Mtu:1500} {Name:ens7 MacAddress:fa:16:3e:a7:98:db Speed:-1 Mtu:1500} {Name:ens7.20 MacAddress:52:54:00:87:11:96 Speed:-1 Mtu:1496} {Name:ens7.21 MacAddress:52:54:00:c4:31:cf Speed:-1 Mtu:1496} {Name:ens7.22 MacAddress:52:54:00:91:b5:4d Speed:-1 Mtu:1496} {Name:eth10 MacAddress:96:47:67:9d:e3:9a Speed:0 Mtu:1500} {Name:ovn-k8s-mp0 MacAddress:0a:58:0a:d9:00:02 Speed:0 Mtu:1400} {Name:ovs-system MacAddress:26:e4:9e:90:4f:2f Speed:0 Mtu:1500}] Topology:[{Id:0 Memory:33654128640 HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] Cores:[{Id:0 Threads:[0] Caches:[{Id:0 Size:32768 Type:Data Level:1} {Id:0 Size:32768 Type:Instruction Level:1} {Id:0 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:0 Size:16777216 Type:Unified Level:3}] SocketID:0 BookID: DrawerID:} {Id:0 Threads:[1] Caches:[{Id:1 Size:32768 Type:Data Level:1} {Id:1 Size:32768 Type:Instruction Level:1} {Id:1 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:1 Size:16777216 Type:Unified Level:3}] SocketID:1 BookID: DrawerID:} {Id:0 Threads:[10] Caches:[{Id:10 Size:32768 Type:Data Level:1} {Id:10 Size:32768 Type:Instruction Level:1} {Id:10 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:10 Size:16777216 Type:Unified Level:3}] SocketID:10 BookID: DrawerID:} {Id:0 Threads:[11] Caches:[{Id:11 Size:32768 Type:Data Level:1} {Id:11 Size:32768 Type:Instruction Level:1} {Id:11 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:11 Size:16777216 Type:Unified Level:3}] SocketID:11 BookID: DrawerID:} {Id:0 Threads:[2] Caches:[{Id:2 Size:32768 Type:Data Level:1} {Id:2 Size:32768 Type:Instruction Level:1} {Id:2 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:2 Size:16777216 Type:Unified Level:3}] SocketID:2 BookID: DrawerID:} {Id:0 Threads:[3] Caches:[{Id:3 Size:32768 Type:Data Level:1} {Id:3 Size:32768 Type:Instruction Level:1} {Id:3 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:3 Size:16777216 Type:Unified Level:3}] SocketID:3 BookID: DrawerID:} {Id:0 Threads:[4] Caches:[{Id:4 Size:32768 Type:Data Level:1} {Id:4 Size:32768 Type:Instruction Level:1} {Id:4 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:4 Size:16777216 Type:Unified Level:3}] SocketID:4 BookID: DrawerID:} {Id:0 Threads:[5] Caches:[{Id:5 Size:32768 Type:Data Level:1} {Id:5 Size:32768 Type:Instruction Level:1} {Id:5 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:5 Size:16777216 Type:Unified Level:3}] SocketID:5 BookID: DrawerID:} {Id:0 Threads:[6] Caches:[{Id:6 Size:32768 Type:Data Level:1} {Id:6 Size:32768 Type:Instruction Level:1} {Id:6 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:6 Size:16777216 Type:Unified Level:3}] SocketID:6 BookID: DrawerID:} {Id:0 Threads:[7] 
Caches:[{Id:7 Size:32768 Type:Data Level:1} {Id:7 Size:32768 Type:Instruction Level:1} {Id:7 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:7 Size:16777216 Type:Unified Level:3}] SocketID:7 BookID: DrawerID:} {Id:0 Threads:[8] Caches:[{Id:8 Size:32768 Type:Data Level:1} {Id:8 Size:32768 Type:Instruction Level:1} {Id:8 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:8 Size:16777216 Type:Unified Level:3}] SocketID:8 BookID: DrawerID:} {Id:0 Threads:[9] Caches:[{Id:9 Size:32768 Type:Data Level:1} {Id:9 Size:32768 Type:Instruction Level:1} {Id:9 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:9 Size:16777216 Type:Unified Level:3}] SocketID:9 BookID: DrawerID:}] Caches:[] Distances:[10]}] CloudProvider:Unknown InstanceType:Unknown InstanceID:None} Jan 27 07:51:34 crc kubenswrapper[4787]: I0127 07:51:34.991844 4787 manager_no_libpfm.go:29] cAdvisor is build without cgo and/or libpfm support. Perf event counters are not available. Jan 27 07:51:34 crc kubenswrapper[4787]: I0127 07:51:34.992264 4787 manager.go:233] Version: {KernelVersion:5.14.0-427.50.2.el9_4.x86_64 ContainerOsVersion:Red Hat Enterprise Linux CoreOS 418.94.202502100215-0 DockerVersion: DockerAPIVersion: CadvisorVersion: CadvisorRevision:} Jan 27 07:51:34 crc kubenswrapper[4787]: I0127 07:51:34.995090 4787 swap_util.go:113] "Swap is on" /proc/swaps contents="Filename\t\t\t\tType\t\tSize\t\tUsed\t\tPriority" Jan 27 07:51:34 crc kubenswrapper[4787]: I0127 07:51:34.995601 4787 container_manager_linux.go:267] "Container manager verified user specified cgroup-root exists" cgroupRoot=[] Jan 27 07:51:34 crc kubenswrapper[4787]: I0127 07:51:34.995683 4787 container_manager_linux.go:272] "Creating Container Manager object based on Node Config" nodeConfig={"NodeName":"crc","RuntimeCgroupsName":"/system.slice/crio.service","SystemCgroupsName":"/system.slice","KubeletCgroupsName":"","KubeletOOMScoreAdj":-999,"ContainerRuntime":"","CgroupsPerQOS":true,"CgroupRoot":"/","CgroupDriver":"systemd","KubeletRootDir":"/var/lib/kubelet","ProtectKernelDefaults":true,"KubeReservedCgroupName":"","SystemReservedCgroupName":"","ReservedSystemCPUs":{},"EnforceNodeAllocatable":{"pods":{}},"KubeReserved":null,"SystemReserved":{"cpu":"200m","ephemeral-storage":"350Mi","memory":"350Mi"},"HardEvictionThresholds":[{"Signal":"imagefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null},{"Signal":"memory.available","Operator":"LessThan","Value":{"Quantity":"100Mi","Percentage":0},"GracePeriod":0,"MinReclaim":null},{"Signal":"nodefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.1},"GracePeriod":0,"MinReclaim":null},{"Signal":"nodefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null},{"Signal":"imagefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.15},"GracePeriod":0,"MinReclaim":null}],"QOSReserved":{},"CPUManagerPolicy":"none","CPUManagerPolicyOptions":null,"TopologyManagerScope":"container","CPUManagerReconcilePeriod":10000000000,"ExperimentalMemoryManagerPolicy":"None","ExperimentalMemoryManagerReservedMemory":null,"PodPidsLimit":4096,"EnforceCPULimits":true,"CPUCFSQuotaPeriod":100000000,"TopologyManagerPolicy":"none","TopologyManagerPolicyOptions":null,"CgroupVersion":2} Jan 27 07:51:34 crc kubenswrapper[4787]: I0127 07:51:34.996123 4787 topology_manager.go:138] "Creating topology manager with none policy" Jan 27 07:51:34 crc kubenswrapper[4787]: I0127 07:51:34.996152 4787 
container_manager_linux.go:303] "Creating device plugin manager" Jan 27 07:51:34 crc kubenswrapper[4787]: I0127 07:51:34.996847 4787 manager.go:142] "Creating Device Plugin manager" path="/var/lib/kubelet/device-plugins/kubelet.sock" Jan 27 07:51:34 crc kubenswrapper[4787]: I0127 07:51:34.996928 4787 server.go:66] "Creating device plugin registration server" version="v1beta1" socket="/var/lib/kubelet/device-plugins/kubelet.sock" Jan 27 07:51:34 crc kubenswrapper[4787]: I0127 07:51:34.997355 4787 state_mem.go:36] "Initialized new in-memory state store" Jan 27 07:51:34 crc kubenswrapper[4787]: I0127 07:51:34.997603 4787 server.go:1245] "Using root directory" path="/var/lib/kubelet" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.003852 4787 kubelet.go:418] "Attempting to sync node with API server" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.003917 4787 kubelet.go:313] "Adding static pod path" path="/etc/kubernetes/manifests" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.003970 4787 file.go:69] "Watching path" path="/etc/kubernetes/manifests" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.003995 4787 kubelet.go:324] "Adding apiserver pod source" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.004016 4787 apiserver.go:42] "Waiting for node sync before watching apiserver pods" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.009691 4787 kuberuntime_manager.go:262] "Container runtime initialized" containerRuntime="cri-o" version="1.31.5-4.rhaos4.18.gitdad78d5.el9" apiVersion="v1" Jan 27 07:51:35 crc kubenswrapper[4787]: W0127 07:51:35.010240 4787 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.181:6443: connect: connection refused Jan 27 07:51:35 crc kubenswrapper[4787]: E0127 07:51:35.010321 4787 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.181:6443: connect: connection refused" logger="UnhandledError" Jan 27 07:51:35 crc kubenswrapper[4787]: W0127 07:51:35.010404 4787 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.181:6443: connect: connection refused Jan 27 07:51:35 crc kubenswrapper[4787]: E0127 07:51:35.010606 4787 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.181:6443: connect: connection refused" logger="UnhandledError" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.010722 4787 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-server-current.pem". 
Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.013685 4787 kubelet.go:854] "Not starting ClusterTrustBundle informer because we are in static kubelet mode" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.015199 4787 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/portworx-volume" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.015228 4787 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/empty-dir" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.015239 4787 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/git-repo" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.015248 4787 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/host-path" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.015264 4787 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/nfs" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.015274 4787 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/secret" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.015284 4787 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/iscsi" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.015335 4787 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/downward-api" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.015350 4787 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/fc" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.015362 4787 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/configmap" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.015392 4787 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/projected" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.015402 4787 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/local-volume" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.017940 4787 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/csi" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.018586 4787 server.go:1280] "Started kubelet" Jan 27 07:51:35 crc systemd[1]: Started Kubernetes Kubelet. 
Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.020414 4787 server.go:163] "Starting to listen" address="0.0.0.0" port=10250 Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.020363 4787 ratelimit.go:55] "Setting rate limiting for endpoint" service="podresources" qps=100 burstTokens=10 Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.021755 4787 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.181:6443: connect: connection refused Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.021881 4787 server.go:236] "Starting to serve the podresources API" endpoint="unix:/var/lib/kubelet/pod-resources/kubelet.sock" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.022360 4787 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate rotation is enabled Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.022386 4787 fs_resource_analyzer.go:67] "Starting FS ResourceAnalyzer" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.022503 4787 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-16 06:59:03.249798544 +0000 UTC Jan 27 07:51:35 crc kubenswrapper[4787]: E0127 07:51:35.022716 4787 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.022788 4787 volume_manager.go:287] "The desired_state_of_world populator starts" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.022804 4787 volume_manager.go:289] "Starting Kubelet Volume Manager" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.023584 4787 desired_state_of_world_populator.go:146] "Desired state populator starts to run" Jan 27 07:51:35 crc kubenswrapper[4787]: E0127 07:51:35.023617 4787 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.181:6443: connect: connection refused" interval="200ms" Jan 27 07:51:35 crc kubenswrapper[4787]: W0127 07:51:35.023806 4787 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.181:6443: connect: connection refused Jan 27 07:51:35 crc kubenswrapper[4787]: E0127 07:51:35.023851 4787 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.181:6443: connect: connection refused" logger="UnhandledError" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.025354 4787 server.go:460] "Adding debug handlers to kubelet server" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.026146 4787 factory.go:55] Registering systemd factory Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.026180 4787 factory.go:221] Registration of the systemd container factory successfully Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.026529 4787 factory.go:153] Registering CRI-O factory Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.026589 4787 factory.go:221] Registration of the crio 
container factory successfully Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.026685 4787 factory.go:219] Registration of the containerd container factory failed: unable to create containerd client: containerd: cannot unix dial containerd api service: dial unix /run/containerd/containerd.sock: connect: no such file or directory Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.033437 4787 factory.go:103] Registering Raw factory Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.033493 4787 manager.go:1196] Started watching for new ooms in manager Jan 27 07:51:35 crc kubenswrapper[4787]: E0127 07:51:35.025917 4787 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/default/events\": dial tcp 38.102.83.181:6443: connect: connection refused" event="&Event{ObjectMeta:{crc.188e8724a28727bf default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:Starting,Message:Starting kubelet.,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-01-27 07:51:35.018522559 +0000 UTC m=+0.670878061,LastTimestamp:2026-01-27 07:51:35.018522559 +0000 UTC m=+0.670878061,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.037846 4787 manager.go:319] Starting recovery of all containers Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.043577 4787 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" seLinuxMountContext="" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.043655 4787 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" seLinuxMountContext="" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.043668 4787 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" seLinuxMountContext="" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.043679 4787 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" seLinuxMountContext="" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.043689 4787 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" seLinuxMountContext="" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.043699 4787 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" seLinuxMountContext="" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.043708 4787 
reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" seLinuxMountContext="" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.043717 4787 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" seLinuxMountContext="" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.043728 4787 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" seLinuxMountContext="" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.043738 4787 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" volumeName="kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" seLinuxMountContext="" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.043748 4787 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" seLinuxMountContext="" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.043757 4787 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" seLinuxMountContext="" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.043785 4787 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" seLinuxMountContext="" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.043800 4787 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d751cbb-f2e2-430d-9754-c882a5e924a5" volumeName="kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl" seLinuxMountContext="" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.043808 4787 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" seLinuxMountContext="" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.043818 4787 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" seLinuxMountContext="" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.043827 4787 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" seLinuxMountContext="" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.043835 4787 reconstruct.go:130] 
"Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" seLinuxMountContext="" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.043843 4787 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" seLinuxMountContext="" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.043854 4787 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" seLinuxMountContext="" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.043863 4787 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" seLinuxMountContext="" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.043873 4787 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" seLinuxMountContext="" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.043885 4787 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" seLinuxMountContext="" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.043895 4787 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" seLinuxMountContext="" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.043903 4787 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" seLinuxMountContext="" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.043918 4787 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" seLinuxMountContext="" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.043950 4787 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" seLinuxMountContext="" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.043961 4787 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" seLinuxMountContext="" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.043972 4787 reconstruct.go:130] "Volume is marked as uncertain and added 
into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" seLinuxMountContext="" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.044000 4787 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" seLinuxMountContext="" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.044011 4787 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" seLinuxMountContext="" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.044020 4787 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" seLinuxMountContext="" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.044029 4787 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" seLinuxMountContext="" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.044040 4787 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" seLinuxMountContext="" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.044049 4787 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" seLinuxMountContext="" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.044058 4787 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf" seLinuxMountContext="" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.044068 4787 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" seLinuxMountContext="" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.044077 4787 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" seLinuxMountContext="" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.044085 4787 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" seLinuxMountContext="" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.044095 4787 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" seLinuxMountContext="" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.044106 4787 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" seLinuxMountContext="" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.044115 4787 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" seLinuxMountContext="" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.044143 4787 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" seLinuxMountContext="" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.044153 4787 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" volumeName="kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" seLinuxMountContext="" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.044162 4787 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" seLinuxMountContext="" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.044172 4787 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" seLinuxMountContext="" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.044181 4787 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" seLinuxMountContext="" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.044190 4787 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" seLinuxMountContext="" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.044199 4787 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5" seLinuxMountContext="" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.044208 4787 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" seLinuxMountContext="" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.044217 4787 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" seLinuxMountContext="" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.044226 4787 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" seLinuxMountContext="" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.044240 4787 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" seLinuxMountContext="" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.044250 4787 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" seLinuxMountContext="" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.044262 4787 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" seLinuxMountContext="" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.044273 4787 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" seLinuxMountContext="" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.044282 4787 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" seLinuxMountContext="" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.044291 4787 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" seLinuxMountContext="" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.044300 4787 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" seLinuxMountContext="" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.044311 4787 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" seLinuxMountContext="" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.044319 4787 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" seLinuxMountContext="" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.044358 4787 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" 
volumeName="kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" seLinuxMountContext="" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.044371 4787 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" seLinuxMountContext="" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.044380 4787 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" seLinuxMountContext="" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.044390 4787 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" seLinuxMountContext="" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.044398 4787 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" seLinuxMountContext="" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.044409 4787 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" seLinuxMountContext="" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.044438 4787 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" seLinuxMountContext="" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.044447 4787 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" seLinuxMountContext="" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.044456 4787 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" seLinuxMountContext="" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.044468 4787 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" seLinuxMountContext="" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.044477 4787 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" seLinuxMountContext="" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.044488 4787 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" 
volumeName="kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" seLinuxMountContext="" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.044501 4787 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" seLinuxMountContext="" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.044511 4787 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" seLinuxMountContext="" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.044521 4787 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" seLinuxMountContext="" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.044531 4787 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" seLinuxMountContext="" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.044541 4787 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" seLinuxMountContext="" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.045056 4787 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" seLinuxMountContext="" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.045067 4787 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="44663579-783b-4372-86d6-acf235a62d72" volumeName="kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" seLinuxMountContext="" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.045077 4787 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" seLinuxMountContext="" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.045087 4787 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" seLinuxMountContext="" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.045097 4787 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" seLinuxMountContext="" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.045111 4787 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" 
volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" seLinuxMountContext="" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.045122 4787 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" seLinuxMountContext="" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.045133 4787 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" seLinuxMountContext="" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.045143 4787 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" seLinuxMountContext="" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.045157 4787 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" seLinuxMountContext="" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.045166 4787 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" seLinuxMountContext="" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.045198 4787 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" seLinuxMountContext="" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.045208 4787 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" seLinuxMountContext="" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.045219 4787 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" seLinuxMountContext="" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.045229 4787 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" seLinuxMountContext="" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.045240 4787 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" seLinuxMountContext="" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.045249 4787 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" 
volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" seLinuxMountContext="" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.045260 4787 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" seLinuxMountContext="" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.045269 4787 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" seLinuxMountContext="" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.045282 4787 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" seLinuxMountContext="" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.045291 4787 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" seLinuxMountContext="" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.045302 4787 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" seLinuxMountContext="" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.045312 4787 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" seLinuxMountContext="" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.045322 4787 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" seLinuxMountContext="" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.045333 4787 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" seLinuxMountContext="" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.045343 4787 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" seLinuxMountContext="" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.045358 4787 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" seLinuxMountContext="" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.045368 4787 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" seLinuxMountContext="" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.045379 4787 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" seLinuxMountContext="" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.045389 4787 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb" seLinuxMountContext="" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.045399 4787 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" seLinuxMountContext="" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.045410 4787 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" seLinuxMountContext="" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.045422 4787 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" seLinuxMountContext="" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.045432 4787 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" seLinuxMountContext="" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.045442 4787 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" seLinuxMountContext="" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.045453 4787 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" seLinuxMountContext="" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.045462 4787 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" seLinuxMountContext="" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.045472 4787 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" seLinuxMountContext="" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.045482 4787 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" 
volumeName="kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" seLinuxMountContext="" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.045491 4787 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" volumeName="kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" seLinuxMountContext="" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.045502 4787 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" seLinuxMountContext="" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.045523 4787 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" seLinuxMountContext="" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.045533 4787 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" seLinuxMountContext="" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.045543 4787 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" seLinuxMountContext="" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.045564 4787 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" volumeName="kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" seLinuxMountContext="" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.045574 4787 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" seLinuxMountContext="" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.045582 4787 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" seLinuxMountContext="" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.045593 4787 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" seLinuxMountContext="" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.045603 4787 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" seLinuxMountContext="" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.045613 4787 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" 
volumeName="kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" seLinuxMountContext="" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.045622 4787 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" seLinuxMountContext="" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.045631 4787 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" seLinuxMountContext="" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.045640 4787 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" seLinuxMountContext="" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.045650 4787 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" seLinuxMountContext="" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.045659 4787 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" seLinuxMountContext="" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.045668 4787 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" seLinuxMountContext="" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.045686 4787 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" seLinuxMountContext="" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.045694 4787 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" seLinuxMountContext="" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.045743 4787 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" seLinuxMountContext="" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.045752 4787 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" seLinuxMountContext="" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.045761 4787 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" 
volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm" seLinuxMountContext="" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.045769 4787 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" seLinuxMountContext="" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.045776 4787 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" seLinuxMountContext="" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.045785 4787 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" seLinuxMountContext="" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.045794 4787 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" seLinuxMountContext="" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.045802 4787 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" seLinuxMountContext="" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.045810 4787 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" seLinuxMountContext="" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.045818 4787 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" seLinuxMountContext="" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.045826 4787 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert" seLinuxMountContext="" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.045835 4787 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" seLinuxMountContext="" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.045843 4787 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" seLinuxMountContext="" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.045851 4787 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" 
volumeName="kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" seLinuxMountContext="" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.045860 4787 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" seLinuxMountContext="" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.045871 4787 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" seLinuxMountContext="" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.045880 4787 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" seLinuxMountContext="" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.045889 4787 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" seLinuxMountContext="" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.045898 4787 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" seLinuxMountContext="" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.045908 4787 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" seLinuxMountContext="" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.045918 4787 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" seLinuxMountContext="" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.045927 4787 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" seLinuxMountContext="" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.045936 4787 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" seLinuxMountContext="" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.045947 4787 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" seLinuxMountContext="" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.045956 4787 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" 
volumeName="kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" seLinuxMountContext="" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.045964 4787 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" seLinuxMountContext="" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.045974 4787 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" seLinuxMountContext="" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.045984 4787 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" seLinuxMountContext="" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.045994 4787 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" seLinuxMountContext="" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.046002 4787 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" seLinuxMountContext="" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.046017 4787 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" seLinuxMountContext="" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.046026 4787 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" seLinuxMountContext="" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.046034 4787 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" seLinuxMountContext="" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.046044 4787 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" seLinuxMountContext="" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.046053 4787 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" seLinuxMountContext="" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.046063 4787 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" 
volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" seLinuxMountContext="" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.046074 4787 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" seLinuxMountContext="" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.046083 4787 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" seLinuxMountContext="" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.046091 4787 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls" seLinuxMountContext="" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.046101 4787 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" seLinuxMountContext="" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.046111 4787 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf" seLinuxMountContext="" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.046127 4787 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" seLinuxMountContext="" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.046137 4787 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" seLinuxMountContext="" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.046147 4787 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" seLinuxMountContext="" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.046156 4787 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" seLinuxMountContext="" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.046166 4787 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" seLinuxMountContext="" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.046177 4787 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" 
volumeName="kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert" seLinuxMountContext="" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.046186 4787 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" seLinuxMountContext="" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.046194 4787 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" seLinuxMountContext="" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.046204 4787 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49ef4625-1d3a-4a9f-b595-c2433d32326d" volumeName="kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" seLinuxMountContext="" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.046214 4787 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" seLinuxMountContext="" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.046222 4787 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" seLinuxMountContext="" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.046231 4787 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" seLinuxMountContext="" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.046240 4787 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" seLinuxMountContext="" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.046249 4787 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" seLinuxMountContext="" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.046259 4787 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" seLinuxMountContext="" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.046267 4787 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" seLinuxMountContext="" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.046275 4787 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" 
volumeName="kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script" seLinuxMountContext="" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.046284 4787 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" seLinuxMountContext="" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.046292 4787 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" seLinuxMountContext="" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.046301 4787 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" seLinuxMountContext="" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.046311 4787 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" seLinuxMountContext="" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.046320 4787 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" seLinuxMountContext="" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.046329 4787 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides" seLinuxMountContext="" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.046338 4787 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3b6479f0-333b-4a96-9adf-2099afdc2447" volumeName="kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr" seLinuxMountContext="" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.046347 4787 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" seLinuxMountContext="" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.046356 4787 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" seLinuxMountContext="" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.046366 4787 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" seLinuxMountContext="" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.048186 4787 reconstruct.go:144] "Volume is marked device as uncertain and added into the actual state" 
volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" deviceMountPath="/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.048207 4787 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" seLinuxMountContext="" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.048217 4787 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" seLinuxMountContext="" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.048228 4787 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" seLinuxMountContext="" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.048236 4787 reconstruct.go:97] "Volume reconstruction finished" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.048243 4787 reconciler.go:26] "Reconciler: start to sync state" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.064444 4787 manager.go:324] Recovery completed Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.073046 4787 kubelet_network_linux.go:50] "Initialized iptables rules." protocol="IPv4" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.075256 4787 kubelet_network_linux.go:50] "Initialized iptables rules." 
protocol="IPv6" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.075297 4787 status_manager.go:217] "Starting to sync pod status with apiserver" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.075330 4787 kubelet.go:2335] "Starting kubelet main sync loop" Jan 27 07:51:35 crc kubenswrapper[4787]: E0127 07:51:35.075466 4787 kubelet.go:2359] "Skipping pod synchronization" err="[container runtime status check may not have completed yet, PLEG is not healthy: pleg has yet to be successful]" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.075628 4787 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.077228 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.077300 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.077313 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:51:35 crc kubenswrapper[4787]: W0127 07:51:35.077831 4787 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.181:6443: connect: connection refused Jan 27 07:51:35 crc kubenswrapper[4787]: E0127 07:51:35.077895 4787 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.181:6443: connect: connection refused" logger="UnhandledError" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.077956 4787 cpu_manager.go:225] "Starting CPU manager" policy="none" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.077967 4787 cpu_manager.go:226] "Reconciling" reconcilePeriod="10s" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.077985 4787 state_mem.go:36] "Initialized new in-memory state store" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.101352 4787 policy_none.go:49] "None policy: Start" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.102867 4787 memory_manager.go:170] "Starting memorymanager" policy="None" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.102917 4787 state_mem.go:35] "Initializing new in-memory state store" Jan 27 07:51:35 crc kubenswrapper[4787]: E0127 07:51:35.123389 4787 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.168915 4787 manager.go:334] "Starting Device Plugin manager" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.168979 4787 manager.go:513] "Failed to read data from checkpoint" checkpoint="kubelet_internal_checkpoint" err="checkpoint is not found" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.168997 4787 server.go:79] "Starting device plugin registration server" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.169584 4787 eviction_manager.go:189] "Eviction manager: starting control loop" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.169604 4787 container_log_manager.go:189] "Initializing container log rotate workers" workers=1 
monitorPeriod="10s" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.170109 4787 plugin_watcher.go:51] "Plugin Watcher Start" path="/var/lib/kubelet/plugins_registry" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.170244 4787 plugin_manager.go:116] "The desired_state_of_world populator (plugin watcher) starts" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.170258 4787 plugin_manager.go:118] "Starting Kubelet Plugin Manager" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.175595 4787 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-machine-config-operator/kube-rbac-proxy-crio-crc","openshift-etcd/etcd-crc","openshift-kube-apiserver/kube-apiserver-crc","openshift-kube-controller-manager/kube-controller-manager-crc","openshift-kube-scheduler/openshift-kube-scheduler-crc"] Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.175718 4787 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 27 07:51:35 crc kubenswrapper[4787]: E0127 07:51:35.180609 4787 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.182222 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.182306 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.182322 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.182605 4787 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.183357 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.183414 4787 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.184234 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.184267 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.184281 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.184562 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.184594 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.184607 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.184780 4787 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.185207 4787 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-etcd/etcd-crc" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.185247 4787 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.186715 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.186747 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.186759 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.186832 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.186871 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.186887 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.187123 4787 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.187297 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.187340 4787 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.188389 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.188424 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.188436 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.188615 4787 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.188705 4787 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.188742 4787 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.189580 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.189610 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.189622 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.189622 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.189706 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.189727 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.189667 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.189768 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.189784 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.189957 4787 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.189993 4787 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.191524 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.191618 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.191630 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:51:35 crc kubenswrapper[4787]: E0127 07:51:35.225050 4787 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.181:6443: connect: connection refused" interval="400ms" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.250075 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.250143 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.250185 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.250218 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.250252 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.250335 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.250370 4787 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.250405 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.250461 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.250532 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.250608 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.250638 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.250664 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.250684 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.250712 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.270012 4787 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.271278 4787 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.271317 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.271328 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.271356 4787 kubelet_node_status.go:76] "Attempting to register node" node="crc" Jan 27 07:51:35 crc kubenswrapper[4787]: E0127 07:51:35.271868 4787 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.181:6443: connect: connection refused" node="crc" Jan 27 07:51:35 crc kubenswrapper[4787]: E0127 07:51:35.343324 4787 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/default/events\": dial tcp 38.102.83.181:6443: connect: connection refused" event="&Event{ObjectMeta:{crc.188e8724a28727bf default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:Starting,Message:Starting kubelet.,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-01-27 07:51:35.018522559 +0000 UTC m=+0.670878061,LastTimestamp:2026-01-27 07:51:35.018522559 +0000 UTC m=+0.670878061,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.351433 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.351520 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.351588 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.351678 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.351700 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " 
pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.351717 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.351766 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.351816 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.351901 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.351931 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.351941 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.351907 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.352013 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.352013 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.351911 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: 
\"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.352052 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.352061 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.352085 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.352108 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.352139 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.352170 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.352203 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.352243 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.352278 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.352696 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.352757 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.352770 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.352805 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.352809 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.352846 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.472037 4787 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.474769 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.474817 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.474835 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.474871 4787 kubelet_node_status.go:76] "Attempting to register node" node="crc" Jan 27 07:51:35 crc kubenswrapper[4787]: E0127 07:51:35.475454 4787 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.181:6443: connect: connection refused" node="crc" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.515293 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.521538 4787 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.536186 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd/etcd-crc" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.549779 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.554296 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Jan 27 07:51:35 crc kubenswrapper[4787]: W0127 07:51:35.578611 4787 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2139d3e2895fc6797b9c76a1b4c9886d.slice/crio-9dc32c1140174765badb0a0858593cf581eaffbc1c3dd9d65bbd0f2a71c9791b WatchSource:0}: Error finding container 9dc32c1140174765badb0a0858593cf581eaffbc1c3dd9d65bbd0f2a71c9791b: Status 404 returned error can't find the container with id 9dc32c1140174765badb0a0858593cf581eaffbc1c3dd9d65bbd0f2a71c9791b Jan 27 07:51:35 crc kubenswrapper[4787]: W0127 07:51:35.581955 4787 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf4b27818a5e8e43d0dc095d08835c792.slice/crio-4d7e83d7c54f2b1f19ec8d4c62b742f0396f9c1bad9521c095d60171521b184b WatchSource:0}: Error finding container 4d7e83d7c54f2b1f19ec8d4c62b742f0396f9c1bad9521c095d60171521b184b: Status 404 returned error can't find the container with id 4d7e83d7c54f2b1f19ec8d4c62b742f0396f9c1bad9521c095d60171521b184b Jan 27 07:51:35 crc kubenswrapper[4787]: W0127 07:51:35.582712 4787 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3dcd261975c3d6b9a6ad6367fd4facd3.slice/crio-968b29380f38d75f8c4712dab15eb59b69a6a38711bfcd9a30e7153b94bf7481 WatchSource:0}: Error finding container 968b29380f38d75f8c4712dab15eb59b69a6a38711bfcd9a30e7153b94bf7481: Status 404 returned error can't find the container with id 968b29380f38d75f8c4712dab15eb59b69a6a38711bfcd9a30e7153b94bf7481 Jan 27 07:51:35 crc kubenswrapper[4787]: W0127 07:51:35.584345 4787 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf614b9022728cf315e60c057852e563e.slice/crio-5f47cbde35e391682f13ee212fb8ec35a7acb36900d4819ff1c5bfe4f92d494d WatchSource:0}: Error finding container 5f47cbde35e391682f13ee212fb8ec35a7acb36900d4819ff1c5bfe4f92d494d: Status 404 returned error can't find the container with id 5f47cbde35e391682f13ee212fb8ec35a7acb36900d4819ff1c5bfe4f92d494d Jan 27 07:51:35 crc kubenswrapper[4787]: W0127 07:51:35.589843 4787 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd1b160f5dda77d281dd8e69ec8d817f9.slice/crio-42478cdcba09526979ef7f7fab798423a958d598eecb65e302f65d5c162dc23b WatchSource:0}: Error finding container 42478cdcba09526979ef7f7fab798423a958d598eecb65e302f65d5c162dc23b: Status 404 returned error can't find the container with id 42478cdcba09526979ef7f7fab798423a958d598eecb65e302f65d5c162dc23b Jan 27 07:51:35 crc kubenswrapper[4787]: E0127 07:51:35.626261 4787 controller.go:145] "Failed to ensure lease exists, will retry" err="Get 
\"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.181:6443: connect: connection refused" interval="800ms" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.875663 4787 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.876948 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.876984 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.876994 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:51:35 crc kubenswrapper[4787]: I0127 07:51:35.877015 4787 kubelet_node_status.go:76] "Attempting to register node" node="crc" Jan 27 07:51:35 crc kubenswrapper[4787]: E0127 07:51:35.877474 4787 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.181:6443: connect: connection refused" node="crc" Jan 27 07:51:36 crc kubenswrapper[4787]: I0127 07:51:36.023067 4787 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.181:6443: connect: connection refused Jan 27 07:51:36 crc kubenswrapper[4787]: I0127 07:51:36.023047 4787 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-18 15:44:04.091825009 +0000 UTC Jan 27 07:51:36 crc kubenswrapper[4787]: I0127 07:51:36.080128 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"9dc32c1140174765badb0a0858593cf581eaffbc1c3dd9d65bbd0f2a71c9791b"} Jan 27 07:51:36 crc kubenswrapper[4787]: I0127 07:51:36.082291 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"4d7e83d7c54f2b1f19ec8d4c62b742f0396f9c1bad9521c095d60171521b184b"} Jan 27 07:51:36 crc kubenswrapper[4787]: I0127 07:51:36.083861 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"42478cdcba09526979ef7f7fab798423a958d598eecb65e302f65d5c162dc23b"} Jan 27 07:51:36 crc kubenswrapper[4787]: I0127 07:51:36.084836 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"5f47cbde35e391682f13ee212fb8ec35a7acb36900d4819ff1c5bfe4f92d494d"} Jan 27 07:51:36 crc kubenswrapper[4787]: I0127 07:51:36.086109 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"968b29380f38d75f8c4712dab15eb59b69a6a38711bfcd9a30e7153b94bf7481"} Jan 27 07:51:36 crc kubenswrapper[4787]: W0127 07:51:36.238825 4787 reflector.go:561] 
k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.181:6443: connect: connection refused Jan 27 07:51:36 crc kubenswrapper[4787]: E0127 07:51:36.238926 4787 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.181:6443: connect: connection refused" logger="UnhandledError" Jan 27 07:51:36 crc kubenswrapper[4787]: W0127 07:51:36.248743 4787 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.181:6443: connect: connection refused Jan 27 07:51:36 crc kubenswrapper[4787]: E0127 07:51:36.248803 4787 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.181:6443: connect: connection refused" logger="UnhandledError" Jan 27 07:51:36 crc kubenswrapper[4787]: E0127 07:51:36.427379 4787 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.181:6443: connect: connection refused" interval="1.6s" Jan 27 07:51:36 crc kubenswrapper[4787]: W0127 07:51:36.527032 4787 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.181:6443: connect: connection refused Jan 27 07:51:36 crc kubenswrapper[4787]: E0127 07:51:36.527178 4787 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.181:6443: connect: connection refused" logger="UnhandledError" Jan 27 07:51:36 crc kubenswrapper[4787]: W0127 07:51:36.608388 4787 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.181:6443: connect: connection refused Jan 27 07:51:36 crc kubenswrapper[4787]: E0127 07:51:36.608503 4787 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.181:6443: connect: connection refused" logger="UnhandledError" Jan 27 07:51:36 crc kubenswrapper[4787]: I0127 07:51:36.677942 4787 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 27 07:51:36 crc kubenswrapper[4787]: I0127 07:51:36.679436 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:51:36 crc kubenswrapper[4787]: I0127 
07:51:36.679488 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:51:36 crc kubenswrapper[4787]: I0127 07:51:36.679502 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:51:36 crc kubenswrapper[4787]: I0127 07:51:36.679528 4787 kubelet_node_status.go:76] "Attempting to register node" node="crc" Jan 27 07:51:36 crc kubenswrapper[4787]: E0127 07:51:36.680060 4787 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.181:6443: connect: connection refused" node="crc" Jan 27 07:51:36 crc kubenswrapper[4787]: I0127 07:51:36.955259 4787 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Rotating certificates Jan 27 07:51:36 crc kubenswrapper[4787]: E0127 07:51:36.956700 4787 certificate_manager.go:562] "Unhandled Error" err="kubernetes.io/kube-apiserver-client-kubelet: Failed while requesting a signed certificate from the control plane: cannot create certificate signing request: Post \"https://api-int.crc.testing:6443/apis/certificates.k8s.io/v1/certificatesigningrequests\": dial tcp 38.102.83.181:6443: connect: connection refused" logger="UnhandledError" Jan 27 07:51:37 crc kubenswrapper[4787]: I0127 07:51:37.023422 4787 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-14 04:08:12.332068776 +0000 UTC Jan 27 07:51:37 crc kubenswrapper[4787]: I0127 07:51:37.023637 4787 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.181:6443: connect: connection refused Jan 27 07:51:37 crc kubenswrapper[4787]: I0127 07:51:37.092100 4787 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="b0fb183cca1d2f6f6f5bd9c80107e85fe0f5f39ea985a036ba115836e45d7cc2" exitCode=0 Jan 27 07:51:37 crc kubenswrapper[4787]: I0127 07:51:37.092217 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"b0fb183cca1d2f6f6f5bd9c80107e85fe0f5f39ea985a036ba115836e45d7cc2"} Jan 27 07:51:37 crc kubenswrapper[4787]: I0127 07:51:37.092283 4787 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 27 07:51:37 crc kubenswrapper[4787]: I0127 07:51:37.093804 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:51:37 crc kubenswrapper[4787]: I0127 07:51:37.093848 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:51:37 crc kubenswrapper[4787]: I0127 07:51:37.093865 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:51:37 crc kubenswrapper[4787]: I0127 07:51:37.094678 4787 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="c37afd8d2562157729aca3685d7abae2bb6b9e081c2b456706dde875fd762c07" exitCode=0 Jan 27 07:51:37 crc kubenswrapper[4787]: I0127 07:51:37.094785 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" 
event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"c37afd8d2562157729aca3685d7abae2bb6b9e081c2b456706dde875fd762c07"} Jan 27 07:51:37 crc kubenswrapper[4787]: I0127 07:51:37.094853 4787 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 27 07:51:37 crc kubenswrapper[4787]: I0127 07:51:37.096135 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:51:37 crc kubenswrapper[4787]: I0127 07:51:37.096183 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:51:37 crc kubenswrapper[4787]: I0127 07:51:37.096204 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:51:37 crc kubenswrapper[4787]: I0127 07:51:37.097210 4787 generic.go:334] "Generic (PLEG): container finished" podID="d1b160f5dda77d281dd8e69ec8d817f9" containerID="6a626256ec689285dfaf54ef25e23d40bef94d10fec21b7eca529c49e527d5a5" exitCode=0 Jan 27 07:51:37 crc kubenswrapper[4787]: I0127 07:51:37.097261 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerDied","Data":"6a626256ec689285dfaf54ef25e23d40bef94d10fec21b7eca529c49e527d5a5"} Jan 27 07:51:37 crc kubenswrapper[4787]: I0127 07:51:37.097344 4787 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 27 07:51:37 crc kubenswrapper[4787]: I0127 07:51:37.098268 4787 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 27 07:51:37 crc kubenswrapper[4787]: I0127 07:51:37.098940 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:51:37 crc kubenswrapper[4787]: I0127 07:51:37.098990 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:51:37 crc kubenswrapper[4787]: I0127 07:51:37.099015 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:51:37 crc kubenswrapper[4787]: I0127 07:51:37.099715 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:51:37 crc kubenswrapper[4787]: I0127 07:51:37.099758 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:51:37 crc kubenswrapper[4787]: I0127 07:51:37.099768 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:51:37 crc kubenswrapper[4787]: I0127 07:51:37.102743 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"bba5d04b33d62090867842211184f53bc23cf78b90a4c6709792639b74d1cf0d"} Jan 27 07:51:37 crc kubenswrapper[4787]: I0127 07:51:37.102826 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"c470d3edd878f69c48f1457780557dd56300c0fe69342c479f1922dca856bceb"} Jan 27 07:51:37 crc kubenswrapper[4787]: I0127 07:51:37.102842 4787 kubelet_node_status.go:401] "Setting node 
annotation to enable volume controller attach/detach" Jan 27 07:51:37 crc kubenswrapper[4787]: I0127 07:51:37.102848 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"99c0fb5499a4bed619d003da60f83076e413fc5bda47a12d42770f567deeeec8"} Jan 27 07:51:37 crc kubenswrapper[4787]: I0127 07:51:37.103014 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"5c114c2274358e777b240765cca81ea54a2e334002317a86595b7bfec95a11fc"} Jan 27 07:51:37 crc kubenswrapper[4787]: I0127 07:51:37.103899 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:51:37 crc kubenswrapper[4787]: I0127 07:51:37.103953 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:51:37 crc kubenswrapper[4787]: I0127 07:51:37.103977 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:51:37 crc kubenswrapper[4787]: I0127 07:51:37.104945 4787 generic.go:334] "Generic (PLEG): container finished" podID="3dcd261975c3d6b9a6ad6367fd4facd3" containerID="ec120e647d27b70e3066a00046d12258db3162ab9beef75b4079d546c2533aaf" exitCode=0 Jan 27 07:51:37 crc kubenswrapper[4787]: I0127 07:51:37.104995 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerDied","Data":"ec120e647d27b70e3066a00046d12258db3162ab9beef75b4079d546c2533aaf"} Jan 27 07:51:37 crc kubenswrapper[4787]: I0127 07:51:37.105067 4787 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 27 07:51:37 crc kubenswrapper[4787]: I0127 07:51:37.107175 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:51:37 crc kubenswrapper[4787]: I0127 07:51:37.107232 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:51:37 crc kubenswrapper[4787]: I0127 07:51:37.107245 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:51:37 crc kubenswrapper[4787]: I0127 07:51:37.666347 4787 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 27 07:51:37 crc kubenswrapper[4787]: I0127 07:51:37.677663 4787 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 27 07:51:38 crc kubenswrapper[4787]: I0127 07:51:38.022648 4787 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.181:6443: connect: connection refused Jan 27 07:51:38 crc kubenswrapper[4787]: I0127 07:51:38.023629 4787 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-06 23:52:57.026642946 +0000 UTC Jan 27 07:51:38 crc kubenswrapper[4787]: E0127 07:51:38.028723 4787 controller.go:145] "Failed to 
ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.181:6443: connect: connection refused" interval="3.2s" Jan 27 07:51:38 crc kubenswrapper[4787]: I0127 07:51:38.124091 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"f3424e1f77a68fac6a8ccd12e018ce28850817c99e72cef7edbff0941124c46b"} Jan 27 07:51:38 crc kubenswrapper[4787]: I0127 07:51:38.124162 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"53dd18ffb0f39abccd8edc121977448f4c491f6be9acee866dde8cbeb7c52ab5"} Jan 27 07:51:38 crc kubenswrapper[4787]: I0127 07:51:38.124174 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"af5452f3980a8a4614721cb60fffffb3b1f3f5d8d82928249857ed2dd3fb5986"} Jan 27 07:51:38 crc kubenswrapper[4787]: I0127 07:51:38.124186 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"35e90c0899d9ae9ea7cce3f5904955f1f3a80e147efbd766e9ed3b34014f40df"} Jan 27 07:51:38 crc kubenswrapper[4787]: I0127 07:51:38.129907 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"cae58e072653f1ba9869c3d6a218332a184032d95378dfba6be1d0c683d6ddf1"} Jan 27 07:51:38 crc kubenswrapper[4787]: I0127 07:51:38.129959 4787 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 27 07:51:38 crc kubenswrapper[4787]: I0127 07:51:38.132732 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:51:38 crc kubenswrapper[4787]: I0127 07:51:38.132779 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:51:38 crc kubenswrapper[4787]: I0127 07:51:38.132790 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:51:38 crc kubenswrapper[4787]: I0127 07:51:38.136502 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"0509b185e88f56d56bc6f8429a620d74855d938f1206d6c9193d8e93fc12ae91"} Jan 27 07:51:38 crc kubenswrapper[4787]: I0127 07:51:38.136574 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"d407da23fc5babf6cec063fb31861fd8ac0f456a44746a83740c61c1e53a436c"} Jan 27 07:51:38 crc kubenswrapper[4787]: I0127 07:51:38.136592 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"d820bf9fe1c788b0bb17535226d96cd310fc188148091fa8b4186dd43baa5f75"} Jan 27 07:51:38 crc kubenswrapper[4787]: I0127 07:51:38.136710 4787 
kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 27 07:51:38 crc kubenswrapper[4787]: I0127 07:51:38.139743 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:51:38 crc kubenswrapper[4787]: I0127 07:51:38.139788 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:51:38 crc kubenswrapper[4787]: I0127 07:51:38.139802 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:51:38 crc kubenswrapper[4787]: I0127 07:51:38.142093 4787 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="a9bb746c11b2a67e014815e16d6765a1c86a3d2c156cae83853881bdb988507c" exitCode=0 Jan 27 07:51:38 crc kubenswrapper[4787]: I0127 07:51:38.142186 4787 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 27 07:51:38 crc kubenswrapper[4787]: I0127 07:51:38.142783 4787 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 27 07:51:38 crc kubenswrapper[4787]: I0127 07:51:38.143142 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"a9bb746c11b2a67e014815e16d6765a1c86a3d2c156cae83853881bdb988507c"} Jan 27 07:51:38 crc kubenswrapper[4787]: I0127 07:51:38.143338 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:51:38 crc kubenswrapper[4787]: I0127 07:51:38.143352 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:51:38 crc kubenswrapper[4787]: I0127 07:51:38.143360 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:51:38 crc kubenswrapper[4787]: I0127 07:51:38.144961 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:51:38 crc kubenswrapper[4787]: I0127 07:51:38.145030 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:51:38 crc kubenswrapper[4787]: I0127 07:51:38.145046 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:51:38 crc kubenswrapper[4787]: I0127 07:51:38.280442 4787 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 27 07:51:38 crc kubenswrapper[4787]: I0127 07:51:38.281880 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:51:38 crc kubenswrapper[4787]: I0127 07:51:38.281937 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:51:38 crc kubenswrapper[4787]: I0127 07:51:38.281951 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:51:38 crc kubenswrapper[4787]: I0127 07:51:38.281983 4787 kubelet_node_status.go:76] "Attempting to register node" node="crc" Jan 27 07:51:38 crc kubenswrapper[4787]: E0127 07:51:38.282528 4787 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 
38.102.83.181:6443: connect: connection refused" node="crc" Jan 27 07:51:38 crc kubenswrapper[4787]: W0127 07:51:38.305333 4787 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.181:6443: connect: connection refused Jan 27 07:51:38 crc kubenswrapper[4787]: E0127 07:51:38.305415 4787 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.181:6443: connect: connection refused" logger="UnhandledError" Jan 27 07:51:38 crc kubenswrapper[4787]: W0127 07:51:38.528683 4787 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.181:6443: connect: connection refused Jan 27 07:51:38 crc kubenswrapper[4787]: E0127 07:51:38.528822 4787 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.181:6443: connect: connection refused" logger="UnhandledError" Jan 27 07:51:38 crc kubenswrapper[4787]: I0127 07:51:38.835475 4787 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 27 07:51:39 crc kubenswrapper[4787]: I0127 07:51:39.024182 4787 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-15 21:48:10.406480909 +0000 UTC Jan 27 07:51:39 crc kubenswrapper[4787]: I0127 07:51:39.146538 4787 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="1fa78137ccc23e32f6eb838abf3991ca3d99b8349721f1419c892a7f8795f55f" exitCode=0 Jan 27 07:51:39 crc kubenswrapper[4787]: I0127 07:51:39.146672 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"1fa78137ccc23e32f6eb838abf3991ca3d99b8349721f1419c892a7f8795f55f"} Jan 27 07:51:39 crc kubenswrapper[4787]: I0127 07:51:39.146958 4787 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 27 07:51:39 crc kubenswrapper[4787]: I0127 07:51:39.149004 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:51:39 crc kubenswrapper[4787]: I0127 07:51:39.149038 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:51:39 crc kubenswrapper[4787]: I0127 07:51:39.149047 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:51:39 crc kubenswrapper[4787]: I0127 07:51:39.154309 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"cacb7c35c671a1aaa2121bf6d0b1e05cd37f7a2e3531251a1b4a56d381b97526"} Jan 27 07:51:39 crc 
kubenswrapper[4787]: I0127 07:51:39.154490 4787 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 27 07:51:39 crc kubenswrapper[4787]: I0127 07:51:39.155215 4787 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 27 07:51:39 crc kubenswrapper[4787]: I0127 07:51:39.156050 4787 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 27 07:51:39 crc kubenswrapper[4787]: I0127 07:51:39.156155 4787 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Jan 27 07:51:39 crc kubenswrapper[4787]: I0127 07:51:39.156250 4787 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 27 07:51:39 crc kubenswrapper[4787]: I0127 07:51:39.156162 4787 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Jan 27 07:51:39 crc kubenswrapper[4787]: I0127 07:51:39.159066 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:51:39 crc kubenswrapper[4787]: I0127 07:51:39.159123 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:51:39 crc kubenswrapper[4787]: I0127 07:51:39.159137 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:51:39 crc kubenswrapper[4787]: I0127 07:51:39.159675 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:51:39 crc kubenswrapper[4787]: I0127 07:51:39.159721 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:51:39 crc kubenswrapper[4787]: I0127 07:51:39.159739 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:51:39 crc kubenswrapper[4787]: I0127 07:51:39.159956 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:51:39 crc kubenswrapper[4787]: I0127 07:51:39.160201 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:51:39 crc kubenswrapper[4787]: I0127 07:51:39.160234 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:51:39 crc kubenswrapper[4787]: I0127 07:51:39.162484 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:51:39 crc kubenswrapper[4787]: I0127 07:51:39.163260 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:51:39 crc kubenswrapper[4787]: I0127 07:51:39.163283 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:51:39 crc kubenswrapper[4787]: I0127 07:51:39.367759 4787 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 27 07:51:39 crc kubenswrapper[4787]: I0127 07:51:39.490276 4787 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 27 07:51:40 crc kubenswrapper[4787]: I0127 07:51:40.024609 4787 certificate_manager.go:356] 
kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-13 00:06:31.001733463 +0000 UTC Jan 27 07:51:40 crc kubenswrapper[4787]: I0127 07:51:40.160520 4787 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 27 07:51:40 crc kubenswrapper[4787]: I0127 07:51:40.160534 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"85684c09dd546fc9a972aa45da092b22d794a5588140a451becfbe962be82b76"} Jan 27 07:51:40 crc kubenswrapper[4787]: I0127 07:51:40.160575 4787 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Jan 27 07:51:40 crc kubenswrapper[4787]: I0127 07:51:40.160600 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"47800c35810b19642ee37e41f20900bef93d843d1d7e2219547dde7bf88cc7dd"} Jan 27 07:51:40 crc kubenswrapper[4787]: I0127 07:51:40.160612 4787 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 27 07:51:40 crc kubenswrapper[4787]: I0127 07:51:40.160616 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"486c1b541ff6adc6a3cdab348fbdd7fbcf1c202c1a474e5e5db37bbcfb38fe06"} Jan 27 07:51:40 crc kubenswrapper[4787]: I0127 07:51:40.160737 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"9717409dfdeb0d26d9a1c28a651d1feddfd4aaa11a9bc7db7206fbf8c6400c22"} Jan 27 07:51:40 crc kubenswrapper[4787]: I0127 07:51:40.160765 4787 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 27 07:51:40 crc kubenswrapper[4787]: I0127 07:51:40.160631 4787 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 27 07:51:40 crc kubenswrapper[4787]: I0127 07:51:40.161463 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:51:40 crc kubenswrapper[4787]: I0127 07:51:40.161487 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:51:40 crc kubenswrapper[4787]: I0127 07:51:40.161516 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:51:40 crc kubenswrapper[4787]: I0127 07:51:40.162250 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:51:40 crc kubenswrapper[4787]: I0127 07:51:40.162273 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:51:40 crc kubenswrapper[4787]: I0127 07:51:40.162281 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:51:40 crc kubenswrapper[4787]: I0127 07:51:40.162273 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:51:40 crc kubenswrapper[4787]: I0127 07:51:40.162297 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:51:40 crc 
kubenswrapper[4787]: I0127 07:51:40.162304 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:51:41 crc kubenswrapper[4787]: I0127 07:51:41.025791 4787 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-27 16:11:44.322232011 +0000 UTC Jan 27 07:51:41 crc kubenswrapper[4787]: I0127 07:51:41.052216 4787 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Rotating certificates Jan 27 07:51:41 crc kubenswrapper[4787]: I0127 07:51:41.172609 4787 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 27 07:51:41 crc kubenswrapper[4787]: I0127 07:51:41.172661 4787 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 27 07:51:41 crc kubenswrapper[4787]: I0127 07:51:41.172744 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"0aa5c186d3ea5f41540e7450b9ee5a5bbfe8359762ab9aa219a4e4bb26fb0a94"} Jan 27 07:51:41 crc kubenswrapper[4787]: I0127 07:51:41.172881 4787 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Jan 27 07:51:41 crc kubenswrapper[4787]: I0127 07:51:41.172980 4787 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 27 07:51:41 crc kubenswrapper[4787]: I0127 07:51:41.175355 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:51:41 crc kubenswrapper[4787]: I0127 07:51:41.175388 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:51:41 crc kubenswrapper[4787]: I0127 07:51:41.175401 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:51:41 crc kubenswrapper[4787]: I0127 07:51:41.175621 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:51:41 crc kubenswrapper[4787]: I0127 07:51:41.175661 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:51:41 crc kubenswrapper[4787]: I0127 07:51:41.175683 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:51:41 crc kubenswrapper[4787]: I0127 07:51:41.176574 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:51:41 crc kubenswrapper[4787]: I0127 07:51:41.176614 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:51:41 crc kubenswrapper[4787]: I0127 07:51:41.176627 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:51:41 crc kubenswrapper[4787]: I0127 07:51:41.482714 4787 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 27 07:51:41 crc kubenswrapper[4787]: I0127 07:51:41.484676 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:51:41 crc kubenswrapper[4787]: I0127 07:51:41.484742 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 
27 07:51:41 crc kubenswrapper[4787]: I0127 07:51:41.484762 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:51:41 crc kubenswrapper[4787]: I0127 07:51:41.484811 4787 kubelet_node_status.go:76] "Attempting to register node" node="crc" Jan 27 07:51:42 crc kubenswrapper[4787]: I0127 07:51:42.026407 4787 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-08 16:51:47.583285955 +0000 UTC Jan 27 07:51:42 crc kubenswrapper[4787]: I0127 07:51:42.174544 4787 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 27 07:51:42 crc kubenswrapper[4787]: I0127 07:51:42.175527 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:51:42 crc kubenswrapper[4787]: I0127 07:51:42.175592 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:51:42 crc kubenswrapper[4787]: I0127 07:51:42.175605 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:51:42 crc kubenswrapper[4787]: I0127 07:51:42.373593 4787 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 27 07:51:42 crc kubenswrapper[4787]: I0127 07:51:42.374153 4787 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 27 07:51:42 crc kubenswrapper[4787]: I0127 07:51:42.375449 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:51:42 crc kubenswrapper[4787]: I0127 07:51:42.375568 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:51:42 crc kubenswrapper[4787]: I0127 07:51:42.375633 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:51:42 crc kubenswrapper[4787]: I0127 07:51:42.490451 4787 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/cluster-policy-controller namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10357/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body= Jan 27 07:51:42 crc kubenswrapper[4787]: I0127 07:51:42.490534 4787 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="cluster-policy-controller" probeResult="failure" output="Get \"https://192.168.126.11:10357/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Jan 27 07:51:43 crc kubenswrapper[4787]: I0127 07:51:43.027267 4787 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-04 21:34:06.607982225 +0000 UTC Jan 27 07:51:43 crc kubenswrapper[4787]: I0127 07:51:43.168885 4787 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-etcd/etcd-crc" Jan 27 07:51:43 crc kubenswrapper[4787]: I0127 07:51:43.178120 4787 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 27 07:51:43 crc 
kubenswrapper[4787]: I0127 07:51:43.179712 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:51:43 crc kubenswrapper[4787]: I0127 07:51:43.179759 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:51:43 crc kubenswrapper[4787]: I0127 07:51:43.179772 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:51:44 crc kubenswrapper[4787]: I0127 07:51:44.028043 4787 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-07 00:08:08.306102948 +0000 UTC Jan 27 07:51:44 crc kubenswrapper[4787]: I0127 07:51:44.956771 4787 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 27 07:51:44 crc kubenswrapper[4787]: I0127 07:51:44.957093 4787 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 27 07:51:44 crc kubenswrapper[4787]: I0127 07:51:44.959235 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:51:44 crc kubenswrapper[4787]: I0127 07:51:44.959472 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:51:44 crc kubenswrapper[4787]: I0127 07:51:44.959659 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:51:45 crc kubenswrapper[4787]: I0127 07:51:45.028875 4787 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-11 18:34:58.072294113 +0000 UTC Jan 27 07:51:45 crc kubenswrapper[4787]: E0127 07:51:45.180847 4787 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Jan 27 07:51:46 crc kubenswrapper[4787]: I0127 07:51:46.029953 4787 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-06 01:34:12.62323753 +0000 UTC Jan 27 07:51:47 crc kubenswrapper[4787]: I0127 07:51:47.030671 4787 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-26 22:00:20.644882248 +0000 UTC Jan 27 07:51:47 crc kubenswrapper[4787]: I0127 07:51:47.247724 4787 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-etcd/etcd-crc" Jan 27 07:51:47 crc kubenswrapper[4787]: I0127 07:51:47.248451 4787 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 27 07:51:47 crc kubenswrapper[4787]: I0127 07:51:47.250776 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:51:47 crc kubenswrapper[4787]: I0127 07:51:47.250847 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:51:47 crc kubenswrapper[4787]: I0127 07:51:47.250866 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:51:48 crc kubenswrapper[4787]: I0127 07:51:48.030871 4787 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 
2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-06 01:48:44.701057067 +0000 UTC Jan 27 07:51:49 crc kubenswrapper[4787]: W0127 07:51:49.000205 4787 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": net/http: TLS handshake timeout Jan 27 07:51:49 crc kubenswrapper[4787]: I0127 07:51:49.000297 4787 trace.go:236] Trace[1902932876]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (27-Jan-2026 07:51:38.998) (total time: 10001ms): Jan 27 07:51:49 crc kubenswrapper[4787]: Trace[1902932876]: ---"Objects listed" error:Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": net/http: TLS handshake timeout 10001ms (07:51:49.000) Jan 27 07:51:49 crc kubenswrapper[4787]: Trace[1902932876]: [10.00149542s] [10.00149542s] END Jan 27 07:51:49 crc kubenswrapper[4787]: E0127 07:51:49.000385 4787 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": net/http: TLS handshake timeout" logger="UnhandledError" Jan 27 07:51:49 crc kubenswrapper[4787]: W0127 07:51:49.001195 4787 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": net/http: TLS handshake timeout Jan 27 07:51:49 crc kubenswrapper[4787]: I0127 07:51:49.001242 4787 trace.go:236] Trace[1245779429]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (27-Jan-2026 07:51:39.000) (total time: 10000ms): Jan 27 07:51:49 crc kubenswrapper[4787]: Trace[1245779429]: ---"Objects listed" error:Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": net/http: TLS handshake timeout 10000ms (07:51:49.001) Jan 27 07:51:49 crc kubenswrapper[4787]: Trace[1245779429]: [10.000806886s] [10.000806886s] END Jan 27 07:51:49 crc kubenswrapper[4787]: E0127 07:51:49.001254 4787 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": net/http: TLS handshake timeout" logger="UnhandledError" Jan 27 07:51:49 crc kubenswrapper[4787]: I0127 07:51:49.024012 4787 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": net/http: TLS handshake timeout Jan 27 07:51:49 crc kubenswrapper[4787]: I0127 07:51:49.031273 4787 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-19 18:28:59.977981277 +0000 UTC Jan 27 07:51:49 crc kubenswrapper[4787]: I0127 07:51:49.068454 4787 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Readiness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" start-of-body= Jan 27 07:51:49 crc kubenswrapper[4787]: I0127 07:51:49.068832 4787 prober.go:107] 
"Probe failed" probeType="Readiness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" Jan 27 07:51:49 crc kubenswrapper[4787]: I0127 07:51:49.195501 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Jan 27 07:51:49 crc kubenswrapper[4787]: I0127 07:51:49.197328 4787 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="cacb7c35c671a1aaa2121bf6d0b1e05cd37f7a2e3531251a1b4a56d381b97526" exitCode=255 Jan 27 07:51:49 crc kubenswrapper[4787]: I0127 07:51:49.197377 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"cacb7c35c671a1aaa2121bf6d0b1e05cd37f7a2e3531251a1b4a56d381b97526"} Jan 27 07:51:49 crc kubenswrapper[4787]: I0127 07:51:49.197533 4787 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 27 07:51:49 crc kubenswrapper[4787]: I0127 07:51:49.198393 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:51:49 crc kubenswrapper[4787]: I0127 07:51:49.198522 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:51:49 crc kubenswrapper[4787]: I0127 07:51:49.198613 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:51:49 crc kubenswrapper[4787]: I0127 07:51:49.199935 4787 scope.go:117] "RemoveContainer" containerID="cacb7c35c671a1aaa2121bf6d0b1e05cd37f7a2e3531251a1b4a56d381b97526" Jan 27 07:51:49 crc kubenswrapper[4787]: I0127 07:51:49.375439 4787 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="Get \"https://192.168.126.11:6443/livez\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" start-of-body= Jan 27 07:51:49 crc kubenswrapper[4787]: I0127 07:51:49.375533 4787 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="Get \"https://192.168.126.11:6443/livez\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Jan 27 07:51:49 crc kubenswrapper[4787]: I0127 07:51:49.532250 4787 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 403" start-of-body={"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/livez\"","reason":"Forbidden","details":{},"code":403} Jan 27 07:51:49 crc kubenswrapper[4787]: I0127 07:51:49.532315 4787 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 403" Jan 27 07:51:50 crc kubenswrapper[4787]: I0127 
07:51:50.032183 4787 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-14 14:16:31.40203822 +0000 UTC Jan 27 07:51:50 crc kubenswrapper[4787]: I0127 07:51:50.203284 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Jan 27 07:51:50 crc kubenswrapper[4787]: I0127 07:51:50.204741 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"879d855210b326ad3cbb964024c0b48b7373519deae8b974b9f5864416c15ef3"} Jan 27 07:51:50 crc kubenswrapper[4787]: I0127 07:51:50.204899 4787 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 27 07:51:50 crc kubenswrapper[4787]: I0127 07:51:50.205732 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:51:50 crc kubenswrapper[4787]: I0127 07:51:50.205766 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:51:50 crc kubenswrapper[4787]: I0127 07:51:50.205775 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:51:51 crc kubenswrapper[4787]: I0127 07:51:51.032315 4787 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-18 09:55:59.087463998 +0000 UTC Jan 27 07:51:52 crc kubenswrapper[4787]: I0127 07:51:52.033082 4787 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-30 20:18:02.304108474 +0000 UTC Jan 27 07:51:52 crc kubenswrapper[4787]: I0127 07:51:52.492058 4787 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/cluster-policy-controller namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10357/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body= Jan 27 07:51:52 crc kubenswrapper[4787]: I0127 07:51:52.492143 4787 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="cluster-policy-controller" probeResult="failure" output="Get \"https://192.168.126.11:10357/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Jan 27 07:51:53 crc kubenswrapper[4787]: I0127 07:51:53.033438 4787 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-26 00:18:26.186270961 +0000 UTC Jan 27 07:51:54 crc kubenswrapper[4787]: I0127 07:51:54.033930 4787 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-13 13:33:57.712014036 +0000 UTC Jan 27 07:51:54 crc kubenswrapper[4787]: I0127 07:51:54.374092 4787 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 27 07:51:54 crc kubenswrapper[4787]: I0127 07:51:54.374479 4787 
kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 27 07:51:54 crc kubenswrapper[4787]: I0127 07:51:54.374937 4787 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 27 07:51:54 crc kubenswrapper[4787]: I0127 07:51:54.377513 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:51:54 crc kubenswrapper[4787]: I0127 07:51:54.377563 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:51:54 crc kubenswrapper[4787]: I0127 07:51:54.377573 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:51:54 crc kubenswrapper[4787]: I0127 07:51:54.379843 4787 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 27 07:51:54 crc kubenswrapper[4787]: E0127 07:51:54.528205 4787 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": context deadline exceeded" interval="6.4s" Jan 27 07:51:54 crc kubenswrapper[4787]: I0127 07:51:54.531712 4787 trace.go:236] Trace[998286491]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (27-Jan-2026 07:51:43.643) (total time: 10887ms): Jan 27 07:51:54 crc kubenswrapper[4787]: Trace[998286491]: ---"Objects listed" error: 10887ms (07:51:54.531) Jan 27 07:51:54 crc kubenswrapper[4787]: Trace[998286491]: [10.887647157s] [10.887647157s] END Jan 27 07:51:54 crc kubenswrapper[4787]: I0127 07:51:54.531760 4787 reflector.go:368] Caches populated for *v1.CSIDriver from k8s.io/client-go/informers/factory.go:160 Jan 27 07:51:54 crc kubenswrapper[4787]: E0127 07:51:54.532782 4787 kubelet_node_status.go:99] "Unable to register node with API server" err="nodes \"crc\" is forbidden: autoscaling.openshift.io/ManagedNode infra config cache not synchronized" node="crc" Jan 27 07:51:54 crc kubenswrapper[4787]: I0127 07:51:54.534354 4787 reconstruct.go:205] "DevicePaths of reconstructed volumes updated" Jan 27 07:51:54 crc kubenswrapper[4787]: I0127 07:51:54.534759 4787 trace.go:236] Trace[468694635]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (27-Jan-2026 07:51:43.117) (total time: 11417ms): Jan 27 07:51:54 crc kubenswrapper[4787]: Trace[468694635]: ---"Objects listed" error: 11417ms (07:51:54.534) Jan 27 07:51:54 crc kubenswrapper[4787]: Trace[468694635]: [11.41727997s] [11.41727997s] END Jan 27 07:51:54 crc kubenswrapper[4787]: I0127 07:51:54.534833 4787 reflector.go:368] Caches populated for *v1.Service from k8s.io/client-go/informers/factory.go:160 Jan 27 07:51:54 crc kubenswrapper[4787]: I0127 07:51:54.545194 4787 reflector.go:368] Caches populated for *v1.CertificateSigningRequest from k8s.io/client-go/tools/watch/informerwatcher.go:146 Jan 27 07:51:54 crc kubenswrapper[4787]: I0127 07:51:54.640522 4787 reflector.go:368] Caches populated for *v1.Node from k8s.io/client-go/informers/factory.go:160 Jan 27 07:51:54 crc kubenswrapper[4787]: I0127 07:51:54.961534 4787 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.016124 4787 apiserver.go:52] "Watching apiserver" Jan 27 07:51:55 crc 
kubenswrapper[4787]: I0127 07:51:55.021133 4787 reflector.go:368] Caches populated for *v1.Pod from pkg/kubelet/config/apiserver.go:66 Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.021526 4787 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-network-operator/iptables-alerter-4ln5h","openshift-network-operator/network-operator-58b4c7f79c-55gtf","openshift-kube-controller-manager/kube-controller-manager-crc","openshift-network-console/networking-console-plugin-85b44fc459-gdk6g","openshift-network-diagnostics/network-check-source-55646444c4-trplf","openshift-network-diagnostics/network-check-target-xd92c","openshift-network-node-identity/network-node-identity-vrzqb"] Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.022233 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.022318 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.022418 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.022533 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb" Jan 27 07:51:55 crc kubenswrapper[4787]: E0127 07:51:55.022602 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 27 07:51:55 crc kubenswrapper[4787]: E0127 07:51:55.022965 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.023575 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.023637 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 27 07:51:55 crc kubenswrapper[4787]: E0127 07:51:55.023986 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.024831 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"openshift-service-ca.crt" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.025008 4787 desired_state_of_world_populator.go:154] "Finished populating initial desired state of world" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.027949 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"iptables-alerter-script" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.028081 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-node-identity"/"network-node-identity-cert" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.028228 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-operator"/"metrics-tls" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.028635 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"env-overrides" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.028671 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"kube-root-ca.crt" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.028702 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"kube-root-ca.crt" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.029009 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"ovnkube-identity-cm" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.029286 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"openshift-service-ca.crt" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.034246 4787 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-12 09:47:33.876653392 +0000 UTC Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.036061 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.036096 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.036120 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.036140 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") pod 
\"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.036162 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.036180 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.036212 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.036236 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.036266 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.036298 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.036319 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.036341 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.036357 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.036377 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gf66m\" (UniqueName: 
\"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.036411 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.036427 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.036459 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.036480 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.036495 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.036512 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.036528 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.036563 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.036583 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.036599 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.036616 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.036632 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.036647 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.036666 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.036686 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.036704 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.036741 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.036759 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.036776 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Jan 27 07:51:55 crc kubenswrapper[4787]: 
I0127 07:51:55.036793 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.036812 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.036830 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.036845 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.036866 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.036887 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.036912 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.036928 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.036943 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.036959 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Jan 27 
07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.036974 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.036991 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.037026 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.037076 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.037110 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.037130 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.037152 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.037173 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.037190 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.037222 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Jan 27 
07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.037245 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.037274 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.037294 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.037310 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.037327 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.037343 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.037360 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.037377 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.037392 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.037722 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: 
"bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.038448 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.038533 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" (OuterVolumeSpecName: "config") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.038808 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" (OuterVolumeSpecName: "config") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.038810 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" (OuterVolumeSpecName: "kube-api-access-mg5zb") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "kube-api-access-mg5zb". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.039006 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" (OuterVolumeSpecName: "kube-api-access-qg5z5") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "kube-api-access-qg5z5". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.039404 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" (OuterVolumeSpecName: "kube-api-access-nzwt7") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "kube-api-access-nzwt7". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.039821 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.039869 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.039891 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.039909 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.039925 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.039946 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.039967 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.039987 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.040005 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.040022 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") pod 
\"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.040038 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.040054 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.040073 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.040094 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.040109 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.040126 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.040144 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.040164 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.040188 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.040209 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.040434 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.040467 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.040629 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.040662 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.040690 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.040770 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.040796 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.040789 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" (OuterVolumeSpecName: "kube-api-access-249nr") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "kube-api-access-249nr". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.040827 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.040894 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.040898 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" (OuterVolumeSpecName: "kube-api-access-x4zgh") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "kube-api-access-x4zgh". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.040919 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.040944 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.040970 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.040994 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.041016 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.041037 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.041056 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: 
\"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.041074 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.041093 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.041109 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.041127 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.041146 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.041163 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.041180 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.041116 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" (OuterVolumeSpecName: "kube-api-access-lzf88") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "kube-api-access-lzf88". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.041197 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.041228 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" (OuterVolumeSpecName: "webhook-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "webhook-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.041248 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.041276 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.041300 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.041330 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.041423 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.041456 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.041483 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.041502 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: 
\"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.041520 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.041539 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.041575 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.041599 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.041618 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") pod \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\" (UID: \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\") " Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.041636 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.041655 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") pod \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\" (UID: \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\") " Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.041673 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.041691 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.041706 
4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" (OuterVolumeSpecName: "etcd-service-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.041711 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.041752 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") pod \"44663579-783b-4372-86d6-acf235a62d72\" (UID: \"44663579-783b-4372-86d6-acf235a62d72\") " Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.041775 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.041793 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.041811 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.041832 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.041851 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.041954 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" (OuterVolumeSpecName: "kube-api-access-w4xd4") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "kube-api-access-w4xd4". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.042008 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.042029 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.042047 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.042066 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.042088 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.042104 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.042121 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.042140 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.042159 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.042177 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Jan 27 07:51:55 crc 
kubenswrapper[4787]: I0127 07:51:55.042195 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.042211 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.042228 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.042260 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.042278 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.042299 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.042314 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.042332 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.042353 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.042375 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: 
\"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.042394 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.042413 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.042430 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.042449 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") pod \"49ef4625-1d3a-4a9f-b595-c2433d32326d\" (UID: \"49ef4625-1d3a-4a9f-b595-c2433d32326d\") " Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.042451 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "env-overrides". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.042469 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.042492 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.042518 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.042541 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.042588 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.042610 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.042630 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.042647 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.042665 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.042682 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") pod 
\"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.042697 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.042715 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.042733 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.042750 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.042751 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "kube-api-access". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.042766 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.042785 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.042802 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.042819 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.042837 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.042855 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.042874 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.042892 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.042908 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.042926 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2d4wz\" (UniqueName: 
\"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.042944 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.042961 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.042978 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.042996 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.043013 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.043030 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.043048 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.043067 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.043085 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.043101 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume 
started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.043118 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.043136 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.042954 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.043155 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.043325 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.043339 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" (OuterVolumeSpecName: "kube-api-access-4d4hj") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "kube-api-access-4d4hj". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.043388 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-serving-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.043391 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.043443 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.043472 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.043529 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.043570 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.043592 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.043614 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.043637 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.043684 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: 
\"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.043841 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.043873 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.043894 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.043921 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.043946 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.043974 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.044005 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.044024 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 
07:51:55.044100 4787 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") on node \"crc\" DevicePath \"\"" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.044104 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" (OuterVolumeSpecName: "client-ca") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.044114 4787 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") on node \"crc\" DevicePath \"\"" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.044168 4787 reconciler_common.go:293] "Volume detached for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") on node \"crc\" DevicePath \"\"" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.044194 4787 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") on node \"crc\" DevicePath \"\"" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.044222 4787 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") on node \"crc\" DevicePath \"\"" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.044245 4787 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.044268 4787 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") on node \"crc\" DevicePath \"\"" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.044291 4787 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") on node \"crc\" DevicePath \"\"" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.044312 4787 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") on node \"crc\" DevicePath \"\"" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.044332 4787 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") on node \"crc\" DevicePath \"\"" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.044518 4787 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.044541 4787 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: 
\"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") on node \"crc\" DevicePath \"\"" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.044585 4787 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") on node \"crc\" DevicePath \"\"" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.044605 4787 reconciler_common.go:293] "Volume detached for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") on node \"crc\" DevicePath \"\"" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.044630 4787 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") on node \"crc\" DevicePath \"\"" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.044650 4787 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") on node \"crc\" DevicePath \"\"" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.044672 4787 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") on node \"crc\" DevicePath \"\"" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.044692 4787 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.044713 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" (OuterVolumeSpecName: "signing-cabundle") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-cabundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.045068 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" (OuterVolumeSpecName: "machine-api-operator-tls") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "machine-api-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.045399 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" (OuterVolumeSpecName: "kube-api-access-6ccd8") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "kube-api-access-6ccd8". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.046374 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" (OuterVolumeSpecName: "kube-api-access-mnrrd") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). 
InnerVolumeSpecName "kube-api-access-mnrrd". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.046638 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.046761 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" (OuterVolumeSpecName: "kube-api-access-v47cf") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "kube-api-access-v47cf". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.046786 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" (OuterVolumeSpecName: "kube-api-access-fqsjt") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "kube-api-access-fqsjt". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.046790 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.047079 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.047689 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" (OuterVolumeSpecName: "cni-sysctl-allowlist") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-sysctl-allowlist". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.047679 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" (OuterVolumeSpecName: "tmpfs") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "tmpfs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.048104 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.048107 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "srv-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.048146 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-session". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.047955 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.048858 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" (OuterVolumeSpecName: "kube-api-access-pcxfs") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "kube-api-access-pcxfs". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.048869 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" (OuterVolumeSpecName: "image-registry-operator-tls") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "image-registry-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.049158 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.049489 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). 
InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.049580 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.050757 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" (OuterVolumeSpecName: "kube-api-access-rnphk") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "kube-api-access-rnphk". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.050887 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "oauth-serving-cert". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.049597 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" (OuterVolumeSpecName: "kube-api-access-bf2bz") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "kube-api-access-bf2bz". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.051176 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "srv-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.051297 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" (OuterVolumeSpecName: "config") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.051740 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" (OuterVolumeSpecName: "kube-api-access-ngvvp") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "kube-api-access-ngvvp". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.051791 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.052103 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "profile-collector-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.052286 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" (OuterVolumeSpecName: "config") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.052356 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-cliconfig". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.052375 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.052632 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" (OuterVolumeSpecName: "kube-api-access-cfbct") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "kube-api-access-cfbct". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.052987 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" (OuterVolumeSpecName: "kube-api-access-2d4wz") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "kube-api-access-2d4wz". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.053112 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.054495 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.055504 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" (OuterVolumeSpecName: "audit") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "audit". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.056498 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.057507 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" (OuterVolumeSpecName: "utilities") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.057934 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" (OuterVolumeSpecName: "kube-api-access-9xfj7") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "kube-api-access-9xfj7". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.058265 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.058497 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" (OuterVolumeSpecName: "config") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.058691 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" (OuterVolumeSpecName: "machine-approver-tls") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "machine-approver-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.059251 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" (OuterVolumeSpecName: "config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.059728 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" (OuterVolumeSpecName: "kube-api-access-vt5rc") pod "44663579-783b-4372-86d6-acf235a62d72" (UID: "44663579-783b-4372-86d6-acf235a62d72"). InnerVolumeSpecName "kube-api-access-vt5rc". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.059712 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" (OuterVolumeSpecName: "config") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.060178 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" (OuterVolumeSpecName: "config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.060589 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "metrics-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.060702 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.060945 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-client". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.061051 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.061490 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" (OuterVolumeSpecName: "kube-api-access-d6qdx") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "kube-api-access-d6qdx". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.061873 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" (OuterVolumeSpecName: "config-volume") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "config-volume". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.061906 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.061601 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.051942 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" (OuterVolumeSpecName: "kube-api-access-d4lsv") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "kube-api-access-d4lsv". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.061957 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.062033 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "encryption-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.062098 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.062290 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" (OuterVolumeSpecName: "images") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "images". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.062334 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "service-ca-bundle". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.062784 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.064532 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.064999 4787 swap_util.go:74] "error creating dir to test if tmpfs noswap is enabled. Assuming not supported" mount path="" error="stat /var/lib/kubelet/plugins/kubernetes.io/empty-dir: no such file or directory" Jan 27 07:51:55 crc kubenswrapper[4787]: E0127 07:51:55.067308 4787 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-27 07:51:55.567280059 +0000 UTC m=+21.219635551 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 27 07:51:55 crc kubenswrapper[4787]: E0127 07:51:55.067696 4787 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Jan 27 07:51:55 crc kubenswrapper[4787]: E0127 07:51:55.069361 4787 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.080283 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovn-node-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.069536 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.069899 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" (OuterVolumeSpecName: "kube-api-access-xcphl") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "kube-api-access-xcphl". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.069811 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" (OuterVolumeSpecName: "kube-api-access-279lb") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "kube-api-access-279lb". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.080466 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.070257 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" (OuterVolumeSpecName: "config") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.070470 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" (OuterVolumeSpecName: "webhook-certs") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "webhook-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.078169 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.069374 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" (OuterVolumeSpecName: "package-server-manager-serving-cert") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "package-server-manager-serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.080655 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 27 07:51:55 crc kubenswrapper[4787]: E0127 07:51:55.080857 4787 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-27 07:51:55.58082387 +0000 UTC m=+21.233179372 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.080962 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "metrics-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.081215 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" (OuterVolumeSpecName: "certs") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.081658 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.081904 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.081968 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" (OuterVolumeSpecName: "kube-api-access-7c4vf") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "kube-api-access-7c4vf". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.081988 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" (OuterVolumeSpecName: "stats-auth") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "stats-auth". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.082426 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" (OuterVolumeSpecName: "kube-api-access-fcqwp") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "kube-api-access-fcqwp". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 27 07:51:55 crc kubenswrapper[4787]: E0127 07:51:55.082480 4787 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-27 07:51:55.582460723 +0000 UTC m=+21.234816215 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.082815 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.082987 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-error". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.083202 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" (OuterVolumeSpecName: "default-certificate") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "default-certificate". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.080841 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.083613 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" (OuterVolumeSpecName: "kube-api-access-tk88c") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "kube-api-access-tk88c". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.083704 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" (OuterVolumeSpecName: "kube-api-access-x2m85") pod "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" (UID: "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d"). InnerVolumeSpecName "kube-api-access-x2m85". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.083411 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "service-ca-bundle". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.083771 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 27 07:51:55 crc kubenswrapper[4787]: E0127 07:51:55.086985 4787 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.083914 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" (OuterVolumeSpecName: "kube-api-access-xcgwh") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "kube-api-access-xcgwh". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.083986 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" (OuterVolumeSpecName: "kube-api-access-w7l8j") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "kube-api-access-w7l8j". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.084026 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" (OuterVolumeSpecName: "node-bootstrap-token") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "node-bootstrap-token". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.084118 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" (OuterVolumeSpecName: "config") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.084524 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" (OuterVolumeSpecName: "kube-api-access-lz9wn") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "kube-api-access-lz9wn". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.085015 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" (OuterVolumeSpecName: "config") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.085036 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" (OuterVolumeSpecName: "utilities") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.086197 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" (OuterVolumeSpecName: "kube-api-access-pjr6v") pod "49ef4625-1d3a-4a9f-b595-c2433d32326d" (UID: "49ef4625-1d3a-4a9f-b595-c2433d32326d"). InnerVolumeSpecName "kube-api-access-pjr6v". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.086741 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-certificates". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.086803 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" (OuterVolumeSpecName: "signing-key") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 27 07:51:55 crc kubenswrapper[4787]: E0127 07:51:55.087089 4787 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.087085 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.087235 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" (OuterVolumeSpecName: "config") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.086138 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" (OuterVolumeSpecName: "config") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.087525 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" (OuterVolumeSpecName: "cert") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.088044 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.088142 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" (OuterVolumeSpecName: "ovn-control-plane-metrics-cert") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovn-control-plane-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.088355 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" (OuterVolumeSpecName: "kube-api-access-kfwg7") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "kube-api-access-kfwg7". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.086712 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" (OuterVolumeSpecName: "samples-operator-tls") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "samples-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.087808 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" (OuterVolumeSpecName: "available-featuregates") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "available-featuregates". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.086966 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Jan 27 07:51:55 crc kubenswrapper[4787]: E0127 07:51:55.088811 4787 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 27 07:51:55 crc kubenswrapper[4787]: E0127 07:51:55.088841 4787 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 27 07:51:55 crc kubenswrapper[4787]: E0127 07:51:55.088922 4787 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-01-27 07:51:55.588896552 +0000 UTC m=+21.241252044 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.089095 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" (OuterVolumeSpecName: "kube-api-access-htfz6") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "kube-api-access-htfz6". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 27 07:51:55 crc kubenswrapper[4787]: E0127 07:51:55.089173 4787 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 27 07:51:55 crc kubenswrapper[4787]: E0127 07:51:55.089202 4787 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.089131 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 27 07:51:55 crc kubenswrapper[4787]: E0127 07:51:55.089281 4787 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-01-27 07:51:55.589263729 +0000 UTC m=+21.241619221 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.089320 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" (OuterVolumeSpecName: "mcd-auth-proxy-config") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "mcd-auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.089482 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.090058 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" (OuterVolumeSpecName: "kube-api-access-s4n52") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "kube-api-access-s4n52". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.090165 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" (OuterVolumeSpecName: "service-ca") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.090193 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" (OuterVolumeSpecName: "kube-api-access-2w9zh") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "kube-api-access-2w9zh". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.090700 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" (OuterVolumeSpecName: "config") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.090715 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.091033 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.091493 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.091799 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.091802 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "profile-collector-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.091795 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.092374 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" (OuterVolumeSpecName: "kube-api-access-8tdtz") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "kube-api-access-8tdtz". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.092901 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" (OuterVolumeSpecName: "kube-api-access-dbsvg") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "kube-api-access-dbsvg". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.093834 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" (OuterVolumeSpecName: "multus-daemon-config") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "multus-daemon-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.095229 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.096966 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" (OuterVolumeSpecName: "kube-api-access-zgdk5") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "kube-api-access-zgdk5". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.097119 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.097262 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.097316 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "auth-proxy-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.097937 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.098443 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.096261 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" (OuterVolumeSpecName: "utilities") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.099763 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.104443 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" (OuterVolumeSpecName: "mcc-auth-proxy-config") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "mcc-auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.104973 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" (OuterVolumeSpecName: "apiservice-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "apiservice-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.105295 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" (OuterVolumeSpecName: "utilities") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.105297 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-oauth-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.105308 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" (OuterVolumeSpecName: "etcd-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.105394 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" (OuterVolumeSpecName: "control-plane-machine-set-operator-tls") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "control-plane-machine-set-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.105648 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" (OuterVolumeSpecName: "kube-api-access-wxkg8") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "kube-api-access-wxkg8". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.105849 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" (OuterVolumeSpecName: "kube-api-access-gf66m") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "kube-api-access-gf66m". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.105899 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.105968 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" (OuterVolumeSpecName: "kube-api-access-x7zkh") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "kube-api-access-x7zkh". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.106274 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.108660 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" (OuterVolumeSpecName: "kube-api-access-sb6h7") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "kube-api-access-sb6h7". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.108734 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.108991 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" (OuterVolumeSpecName: "console-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.109107 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.109157 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-router-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.109136 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.109211 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" (OuterVolumeSpecName: "kube-api-access-jhbk2") pod "bd23aa5c-e532-4e53-bccf-e79f130c5ae8" (UID: "bd23aa5c-e532-4e53-bccf-e79f130c5ae8"). InnerVolumeSpecName "kube-api-access-jhbk2". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.109230 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.109266 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" (OuterVolumeSpecName: "config") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.109584 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" (OuterVolumeSpecName: "kube-api-access-zkvpv") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "kube-api-access-zkvpv". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.109627 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-operator-metrics". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.109610 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.109714 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.109749 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.109781 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.109852 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" (OuterVolumeSpecName: "images") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). 
InnerVolumeSpecName "images". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.109907 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.109943 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.109981 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" (OuterVolumeSpecName: "serviceca") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "serviceca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.110293 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-serving-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.110907 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.110946 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.111147 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" (OuterVolumeSpecName: "image-import-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "image-import-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.111180 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" (OuterVolumeSpecName: "kube-api-access-pj782") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "kube-api-access-pj782". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.111476 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.111748 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" (OuterVolumeSpecName: "service-ca") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.111991 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" (OuterVolumeSpecName: "kube-api-access-qs4fp") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "kube-api-access-qs4fp". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.112034 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-login". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.112063 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" (OuterVolumeSpecName: "kube-api-access-w9rds") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "kube-api-access-w9rds". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.112334 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.112520 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "encryption-config". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.112662 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.113003 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" (OuterVolumeSpecName: "kube-api-access-jkwtn") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "kube-api-access-jkwtn". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.113210 4787 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="01ab3dd5-8196-46d0-ad33-122e2ca51def" path="/var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.114208 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" (OuterVolumeSpecName: "kube-api-access-6g6sz") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "kube-api-access-6g6sz". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.114441 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-script-lib". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.115388 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.115618 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.116111 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" (OuterVolumeSpecName: "client-ca") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "client-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.119185 4787 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" path="/var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.124014 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.124602 4787 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09efc573-dbb6-4249-bd59-9b87aba8dd28" path="/var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.125092 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.126187 4787 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b574797-001e-440a-8f4e-c0be86edad0f" path="/var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.128968 4787 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b78653f-4ff9-4508-8672-245ed9b561e3" path="/var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.129178 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.129777 4787 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1386a44e-36a2-460c-96d0-0359d2b6f0f5" path="/var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.132211 4787 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1bf7eb37-55a3-4c65-b768-a94c82151e69" path="/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.133003 4787 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1d611f23-29be-4491-8495-bee1670e935f" path="/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.134731 4787 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="20b0d48f-5fd6-431c-a545-e3c800c7b866" path="/var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/volumes" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.135465 4787 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" path="/var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.136761 4787 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="22c825df-677d-4ca6-82db-3454ed06e783" path="/var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.137869 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.137945 4787 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="25e176fe-21b4-4974-b1ed-c8b94f112a7f" path="/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.139233 4787 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" path="/var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.139898 4787 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="31d8b7a1-420e-4252-a5b7-eebe8a111292" path="/var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.140635 4787 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3ab1a177-2de0-46d9-b765-d0d0649bb42e" path="/var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/volumes" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.142004 4787 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" path="/var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.142832 4787 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="43509403-f426-496e-be36-56cef71462f5" path="/var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.143975 4787 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="44663579-783b-4372-86d6-acf235a62d72" path="/var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/volumes" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.144639 4787 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="496e6271-fb68-4057-954e-a0d97a4afa3f" path="/var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.145332 4787 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" path="/var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.145883 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "ca-trust-extracted". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.145896 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.145959 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.146432 4787 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49ef4625-1d3a-4a9f-b595-c2433d32326d" path="/var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/volumes" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.147247 4787 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4bb40260-dbaa-4fb0-84df-5e680505d512" path="/var/lib/kubelet/pods/4bb40260-dbaa-4fb0-84df-5e680505d512/volumes" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.147789 4787 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5225d0e4-402f-4861-b410-819f433b1803" path="/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.148339 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.148586 4787 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.148613 4787 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.148630 4787 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") on node \"crc\" DevicePath \"\"" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.148643 4787 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") on node \"crc\" DevicePath \"\"" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.148657 4787 reconciler_common.go:293] "Volume detached for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") on node \"crc\" DevicePath \"\"" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.148669 4787 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: 
\"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.148682 4787 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") on node \"crc\" DevicePath \"\"" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.148698 4787 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") on node \"crc\" DevicePath \"\"" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.148698 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.148711 4787 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.148736 4787 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") on node \"crc\" DevicePath \"\"" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.148751 4787 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.148919 4787 reconciler_common.go:293] "Volume detached for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") on node \"crc\" DevicePath \"\"" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.149058 4787 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5441d097-087c-4d9a-baa8-b210afa90fc9" path="/var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.149222 4787 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.149241 4787 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") on node \"crc\" DevicePath \"\"" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.149254 4787 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.149268 4787 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") on node \"crc\" DevicePath \"\"" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.149285 4787 
reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") on node \"crc\" DevicePath \"\"" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.149313 4787 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") on node \"crc\" DevicePath \"\"" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.149325 4787 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.149449 4787 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") on node \"crc\" DevicePath \"\"" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.149476 4787 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.149490 4787 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") on node \"crc\" DevicePath \"\"" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.149504 4787 reconciler_common.go:293] "Volume detached for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.149518 4787 reconciler_common.go:293] "Volume detached for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") on node \"crc\" DevicePath \"\"" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.149532 4787 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") on node \"crc\" DevicePath \"\"" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.149544 4787 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="57a731c4-ef35-47a8-b875-bfb08a7f8011" path="/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.149544 4787 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") on node \"crc\" DevicePath \"\"" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.150050 4787 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.150185 4787 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.150325 4787 reconciler_common.go:293] "Volume detached 
for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") on node \"crc\" DevicePath \"\"" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.150442 4787 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") on node \"crc\" DevicePath \"\"" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.150586 4787 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.150736 4787 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.150877 4787 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") on node \"crc\" DevicePath \"\"" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.150979 4787 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") on node \"crc\" DevicePath \"\"" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.151060 4787 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") on node \"crc\" DevicePath \"\"" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.151181 4787 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.151280 4787 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") on node \"crc\" DevicePath \"\"" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.151378 4787 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.151468 4787 reconciler_common.go:293] "Volume detached for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") on node \"crc\" DevicePath \"\"" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.151575 4787 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") on node \"crc\" DevicePath \"\"" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.151720 4787 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") on node \"crc\" DevicePath \"\"" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.151821 
4787 reconciler_common.go:293] "Volume detached for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") on node \"crc\" DevicePath \"\"" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.151910 4787 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.152009 4787 reconciler_common.go:293] "Volume detached for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") on node \"crc\" DevicePath \"\"" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.152134 4787 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.152262 4787 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.152353 4787 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") on node \"crc\" DevicePath \"\"" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.152440 4787 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") on node \"crc\" DevicePath \"\"" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.152605 4787 reconciler_common.go:293] "Volume detached for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.152751 4787 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.152931 4787 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") on node \"crc\" DevicePath \"\"" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.153074 4787 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.153220 4787 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") on node \"crc\" DevicePath \"\"" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.153326 4787 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 27 07:51:55 crc 
kubenswrapper[4787]: I0127 07:51:55.153483 4787 reconciler_common.go:293] "Volume detached for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") on node \"crc\" DevicePath \"\"" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.153622 4787 reconciler_common.go:293] "Volume detached for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") on node \"crc\" DevicePath \"\"" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.153744 4787 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") on node \"crc\" DevicePath \"\"" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.153847 4787 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") on node \"crc\" DevicePath \"\"" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.153957 4787 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") on node \"crc\" DevicePath \"\"" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.154045 4787 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") on node \"crc\" DevicePath \"\"" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.154182 4787 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.154325 4787 reconciler_common.go:293] "Volume detached for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") on node \"crc\" DevicePath \"\"" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.154462 4787 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") on node \"crc\" DevicePath \"\"" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.154620 4787 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") on node \"crc\" DevicePath \"\"" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.154746 4787 reconciler_common.go:293] "Volume detached for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") on node \"crc\" DevicePath \"\"" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.154894 4787 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") on node \"crc\" DevicePath \"\"" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.155021 4787 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") on node \"crc\" DevicePath \"\"" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.155170 4787 
reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.155276 4787 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.155365 4787 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") on node \"crc\" DevicePath \"\"" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.155491 4787 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") on node \"crc\" DevicePath \"\"" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.155712 4787 reconciler_common.go:293] "Volume detached for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") on node \"crc\" DevicePath \"\"" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.155751 4787 reconciler_common.go:293] "Volume detached for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") on node \"crc\" DevicePath \"\"" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.155764 4787 reconciler_common.go:293] "Volume detached for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") on node \"crc\" DevicePath \"\"" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.155779 4787 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.155790 4787 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") on node \"crc\" DevicePath \"\"" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.150667 4787 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5b88f790-22fa-440e-b583-365168c0b23d" path="/var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/volumes" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.155800 4787 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") on node \"crc\" DevicePath \"\"" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.155844 4787 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") on node \"crc\" DevicePath \"\"" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.155854 4787 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") on node \"crc\" DevicePath \"\"" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.155878 4787 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" 
(UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.155888 4787 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") on node \"crc\" DevicePath \"\"" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.155899 4787 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") on node \"crc\" DevicePath \"\"" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.155911 4787 reconciler_common.go:293] "Volume detached for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") on node \"crc\" DevicePath \"\"" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.155923 4787 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") on node \"crc\" DevicePath \"\"" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.155957 4787 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") on node \"crc\" DevicePath \"\"" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.155966 4787 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.155977 4787 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.155987 4787 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") on node \"crc\" DevicePath \"\"" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.156000 4787 reconciler_common.go:293] "Volume detached for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") on node \"crc\" DevicePath \"\"" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.156009 4787 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") on node \"crc\" DevicePath \"\"" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.156020 4787 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") on node \"crc\" DevicePath \"\"" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.156030 4787 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") on node \"crc\" DevicePath \"\"" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.156039 4787 reconciler_common.go:293] "Volume detached for volume 
\"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") on node \"crc\" DevicePath \"\"" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.156049 4787 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.156059 4787 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.156072 4787 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.156084 4787 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") on node \"crc\" DevicePath \"\"" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.156094 4787 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") on node \"crc\" DevicePath \"\"" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.156108 4787 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.156118 4787 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") on node \"crc\" DevicePath \"\"" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.156127 4787 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") on node \"crc\" DevicePath \"\"" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.156199 4787 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") on node \"crc\" DevicePath \"\"" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.156213 4787 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.156222 4787 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") on node \"crc\" DevicePath \"\"" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.156231 4787 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.156243 4787 reconciler_common.go:293] "Volume 
detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") on node \"crc\" DevicePath \"\"" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.156252 4787 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") on node \"crc\" DevicePath \"\"" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.156261 4787 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") on node \"crc\" DevicePath \"\"" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.156269 4787 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") on node \"crc\" DevicePath \"\"" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.156278 4787 reconciler_common.go:293] "Volume detached for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") on node \"crc\" DevicePath \"\"" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.156289 4787 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") on node \"crc\" DevicePath \"\"" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.156298 4787 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") on node \"crc\" DevicePath \"\"" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.156312 4787 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.156323 4787 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") on node \"crc\" DevicePath \"\"" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.156332 4787 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") on node \"crc\" DevicePath \"\"" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.156342 4787 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.156352 4787 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.156362 4787 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") on node \"crc\" DevicePath \"\"" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.156372 4787 reconciler_common.go:293] "Volume detached for volume 
\"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") on node \"crc\" DevicePath \"\"" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.156382 4787 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") on node \"crc\" DevicePath \"\"" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.156395 4787 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") on node \"crc\" DevicePath \"\"" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.156406 4787 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") on node \"crc\" DevicePath \"\"" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.156416 4787 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") on node \"crc\" DevicePath \"\"" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.156488 4787 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") on node \"crc\" DevicePath \"\"" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.156498 4787 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") on node \"crc\" DevicePath \"\"" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.156509 4787 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.156521 4787 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.156531 4787 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") on node \"crc\" DevicePath \"\"" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.156544 4787 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.156571 4787 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") on node \"crc\" DevicePath \"\"" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.156584 4787 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 
07:51:55.156597 4787 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.156609 4787 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") on node \"crc\" DevicePath \"\"" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.156620 4787 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") on node \"crc\" DevicePath \"\"" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.156633 4787 reconciler_common.go:293] "Volume detached for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") on node \"crc\" DevicePath \"\"" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.156648 4787 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.156658 4787 reconciler_common.go:293] "Volume detached for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") on node \"crc\" DevicePath \"\"" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.156678 4787 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") on node \"crc\" DevicePath \"\"" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.156691 4787 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") on node \"crc\" DevicePath \"\"" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.156713 4787 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") on node \"crc\" DevicePath \"\"" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.156723 4787 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") on node \"crc\" DevicePath \"\"" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.156733 4787 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") on node \"crc\" DevicePath \"\"" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.156743 4787 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") on node \"crc\" DevicePath \"\"" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.156751 4787 reconciler_common.go:293] "Volume detached for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") on node \"crc\" DevicePath \"\"" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.156760 4787 reconciler_common.go:293] "Volume detached for 
volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") on node \"crc\" DevicePath \"\"" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.156770 4787 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") on node \"crc\" DevicePath \"\"" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.156780 4787 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.156790 4787 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") on node \"crc\" DevicePath \"\"" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.156801 4787 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") on node \"crc\" DevicePath \"\"" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.156811 4787 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") on node \"crc\" DevicePath \"\"" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.156821 4787 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") on node \"crc\" DevicePath \"\"" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.156831 4787 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") on node \"crc\" DevicePath \"\"" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.156840 4787 reconciler_common.go:293] "Volume detached for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.156850 4787 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") on node \"crc\" DevicePath \"\"" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.156687 4787 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5fe579f8-e8a6-4643-bce5-a661393c4dde" path="/var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/volumes" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.156925 4787 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.156939 4787 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") on node \"crc\" DevicePath \"\"" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.156949 4787 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6ccd8\" (UniqueName: 
\"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") on node \"crc\" DevicePath \"\"" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.156959 4787 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") on node \"crc\" DevicePath \"\"" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.156969 4787 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") on node \"crc\" DevicePath \"\"" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.156981 4787 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") on node \"crc\" DevicePath \"\"" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.156991 4787 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.157001 4787 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.157012 4787 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") on node \"crc\" DevicePath \"\"" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.157022 4787 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") on node \"crc\" DevicePath \"\"" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.157031 4787 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.157041 4787 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.157051 4787 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") on node \"crc\" DevicePath \"\"" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.157059 4787 reconciler_common.go:293] "Volume detached for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") on node \"crc\" DevicePath \"\"" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.157072 4787 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.157082 4787 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x7zkh\" (UniqueName: 
\"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") on node \"crc\" DevicePath \"\"" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.157092 4787 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") on node \"crc\" DevicePath \"\"" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.157101 4787 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.157110 4787 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") on node \"crc\" DevicePath \"\"" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.157119 4787 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") on node \"crc\" DevicePath \"\"" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.157128 4787 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") on node \"crc\" DevicePath \"\"" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.157138 4787 reconciler_common.go:293] "Volume detached for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") on node \"crc\" DevicePath \"\"" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.157467 4787 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6402fda4-df10-493c-b4e5-d0569419652d" path="/var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.158803 4787 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6509e943-70c6-444c-bc41-48a544e36fbd" path="/var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.159331 4787 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6731426b-95fe-49ff-bb5f-40441049fde2" path="/var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/volumes" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.160294 4787 kubelet_volumes.go:152] "Cleaned up orphaned volume subpath from pod" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volume-subpaths/run-systemd/ovnkube-controller/6" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.160397 4787 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volumes" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.162122 4787 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7539238d-5fe0-46ed-884e-1c3b566537ec" path="/var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.163011 4787 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7583ce53-e0fe-4a16-9e4d-50516596a136" path="/var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.163413 4787 
kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7bb08738-c794-4ee8-9972-3a62ca171029" path="/var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.165189 4787 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="87cf06ed-a83f-41a7-828d-70653580a8cb" path="/var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.165907 4787 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" path="/var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.166988 4787 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="925f1c65-6136-48ba-85aa-3a3b50560753" path="/var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.167761 4787 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" path="/var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/volumes" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.169071 4787 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9d4552c7-cd75-42dd-8880-30dd377c49a4" path="/var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.169652 4787 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" path="/var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/volumes" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.170837 4787 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a31745f5-9847-4afe-82a5-3161cc66ca93" path="/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.171864 4787 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" path="/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.172507 4787 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6312bbd-5731-4ea0-a20f-81d5a57df44a" path="/var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/volumes" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.172989 4787 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" path="/var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.173928 4787 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" path="/var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.175421 4787 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" path="/var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/volumes" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.175970 4787 reflector.go:368] Caches populated for *v1.RuntimeClass from k8s.io/client-go/informers/factory.go:160 Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.177231 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" 
err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.177466 4787 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bf126b07-da06-4140-9a57-dfd54fc6b486" path="/var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.178225 4787 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c03ee662-fb2f-4fc4-a2c1-af487c19d254" path="/var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.178875 4787 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" path="/var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/volumes" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.180273 4787 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e7e6199b-1264-4501-8953-767f51328d08" path="/var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.181075 4787 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="efdd0498-1daa-4136-9a4a-3b948c2293fc" path="/var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/volumes" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.182666 4787 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" path="/var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/volumes" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.183377 4787 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fda69060-fa79-4696-b1a6-7980f124bf7c" path="/var/lib/kubelet/pods/fda69060-fa79-4696-b1a6-7980f124bf7c/volumes" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.190275 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.204291 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.223628 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9bef4a57-904d-47b1-baa1-224c5c9f2b1a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: [cluster-policy-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: 
[cluster-policy-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://99c0fb5499a4bed619d003da60f83076e413fc5bda47a12d42770f567deeeec8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c114c2274358e777b240765cca81ea54a2e334002317a86595b7bfec95a11fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c470d3edd878f69c48f1457780557dd56300c0fe69342c479f1922dca856bceb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bba5d04b33d62090867842211184f53bc23cf78b90a4c6709792639b74d1cf0d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\"
,\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:35Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.232085 4787 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.235135 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.246868 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.259722 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.270832 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.283285 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.295895 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.307974 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9bef4a57-904d-47b1-baa1-224c5c9f2b1a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: [cluster-policy-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: 
[cluster-policy-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://99c0fb5499a4bed619d003da60f83076e413fc5bda47a12d42770f567deeeec8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c114c2274358e777b240765cca81ea54a2e334002317a86595b7bfec95a11fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c470d3edd878f69c48f1457780557dd56300c0fe69342c479f1922dca856bceb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bba5d04b33d62090867842211184f53bc23cf78b90a4c6709792639b74d1cf0d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\"
,\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:35Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.321356 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"60eef58e-b2eb-43d9-a499-317083a89ca3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://35e90c0899d9ae9ea7cce3f5904955f1f3a80e147efbd766e9ed3b34014f40df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://53dd18ffb0f39abccd8edc121977448f4c491f6be9acee866dde8cbeb7c52ab5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27
T07:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://af5452f3980a8a4614721cb60fffffb3b1f3f5d8d82928249857ed2dd3fb5986\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://879d855210b326ad3cbb964024c0b48b7373519deae8b974b9f5864416c15ef3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cacb7c35c671a1aaa2121bf6d0b1e05cd37f7a2e3531251a1b4a56d381b97526\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-27T07:51:48Z\\\",\\\"message\\\":\\\"W0127 07:51:38.260378 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0127 07:51:38.260778 1 crypto.go:601] Generating new CA for check-endpoints-signer@1769500298 cert, and key in /tmp/serving-cert-1332914759/serving-signer.crt, /tmp/serving-cert-1332914759/serving-signer.key\\\\nI0127 07:51:38.543192 1 observer_polling.go:159] Starting file observer\\\\nW0127 07:51:38.545642 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0127 07:51:38.545783 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0127 07:51:38.547951 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1332914759/tls.crt::/tmp/serving-cert-1332914759/tls.key\\\\\\\"\\\\nF0127 07:51:48.940921 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake 
timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-27T07:51:38Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f3424e1f77a68fac6a8ccd12e018ce28850817c99e72cef7edbff0941124c46b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c37afd8d2562157729aca3685d7abae2bb6b9e081c2b456706dde875fd762c07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c37afd8d2562157729aca3685d7abae2bb6b9e081c2b456706dde875fd762c07\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:51:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:35Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.341216 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"message\\\":\\\"containers with 
unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.349315 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.382572 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb" Jan 27 07:51:55 crc kubenswrapper[4787]: W0127 07:51:55.402469 4787 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podef543e1b_8068_4ea3_b32a_61027b32e95d.slice/crio-49a5ef49fa90ba7dd3f4ea76e1cb2effd0e2212f43be1e4021cad0a3af69661c WatchSource:0}: Error finding container 49a5ef49fa90ba7dd3f4ea76e1cb2effd0e2212f43be1e4021cad0a3af69661c: Status 404 returned error can't find the container with id 49a5ef49fa90ba7dd3f4ea76e1cb2effd0e2212f43be1e4021cad0a3af69661c Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.404538 4787 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h" Jan 27 07:51:55 crc kubenswrapper[4787]: W0127 07:51:55.424791 4787 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd75a4c96_2883_4a0b_bab2_0fab2b6c0b49.slice/crio-eb8bab198205f7915ba55dd08a7964a0974bacb6c02e89bd3f3f3118a0d9fed1 WatchSource:0}: Error finding container eb8bab198205f7915ba55dd08a7964a0974bacb6c02e89bd3f3f3118a0d9fed1: Status 404 returned error can't find the container with id eb8bab198205f7915ba55dd08a7964a0974bacb6c02e89bd3f3f3118a0d9fed1 Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.662153 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 27 07:51:55 crc kubenswrapper[4787]: E0127 07:51:55.662353 4787 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-27 07:51:56.662325013 +0000 UTC m=+22.314680505 (durationBeforeRetry 1s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.662766 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.662831 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.662873 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 27 07:51:55 crc kubenswrapper[4787]: I0127 07:51:55.662904 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 27 07:51:55 crc kubenswrapper[4787]: E0127 07:51:55.662984 4787 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Jan 27 07:51:55 crc kubenswrapper[4787]: E0127 07:51:55.663034 4787 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-27 07:51:56.663026728 +0000 UTC m=+22.315382220 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Jan 27 07:51:55 crc kubenswrapper[4787]: E0127 07:51:55.663347 4787 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 27 07:51:55 crc kubenswrapper[4787]: E0127 07:51:55.663460 4787 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 27 07:51:55 crc kubenswrapper[4787]: E0127 07:51:55.663503 4787 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 27 07:51:55 crc kubenswrapper[4787]: E0127 07:51:55.663542 4787 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 27 07:51:55 crc kubenswrapper[4787]: E0127 07:51:55.663470 4787 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-27 07:51:56.663451436 +0000 UTC m=+22.315806918 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 27 07:51:55 crc kubenswrapper[4787]: E0127 07:51:55.663653 4787 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-01-27 07:51:56.663632909 +0000 UTC m=+22.315988401 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 27 07:51:55 crc kubenswrapper[4787]: E0127 07:51:55.663389 4787 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 27 07:51:55 crc kubenswrapper[4787]: E0127 07:51:55.663673 4787 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 27 07:51:55 crc kubenswrapper[4787]: E0127 07:51:55.663683 4787 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 27 07:51:55 crc kubenswrapper[4787]: E0127 07:51:55.663707 4787 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-01-27 07:51:56.663701501 +0000 UTC m=+22.316056993 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 27 07:51:56 crc kubenswrapper[4787]: I0127 07:51:56.034855 4787 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-15 18:47:35.628662276 +0000 UTC Jan 27 07:51:56 crc kubenswrapper[4787]: I0127 07:51:56.223823 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/1.log" Jan 27 07:51:56 crc kubenswrapper[4787]: I0127 07:51:56.224372 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Jan 27 07:51:56 crc kubenswrapper[4787]: I0127 07:51:56.226472 4787 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="879d855210b326ad3cbb964024c0b48b7373519deae8b974b9f5864416c15ef3" exitCode=255 Jan 27 07:51:56 crc kubenswrapper[4787]: I0127 07:51:56.226566 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"879d855210b326ad3cbb964024c0b48b7373519deae8b974b9f5864416c15ef3"} Jan 27 07:51:56 crc kubenswrapper[4787]: I0127 07:51:56.226618 4787 scope.go:117] "RemoveContainer" containerID="cacb7c35c671a1aaa2121bf6d0b1e05cd37f7a2e3531251a1b4a56d381b97526" Jan 27 07:51:56 
crc kubenswrapper[4787]: I0127 07:51:56.227271 4787 scope.go:117] "RemoveContainer" containerID="879d855210b326ad3cbb964024c0b48b7373519deae8b974b9f5864416c15ef3" Jan 27 07:51:56 crc kubenswrapper[4787]: E0127 07:51:56.227461 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-apiserver-check-endpoints\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\"" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" Jan 27 07:51:56 crc kubenswrapper[4787]: I0127 07:51:56.227662 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"eb8bab198205f7915ba55dd08a7964a0974bacb6c02e89bd3f3f3118a0d9fed1"} Jan 27 07:51:56 crc kubenswrapper[4787]: I0127 07:51:56.229587 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"2974b5846d4b9e95a3eab849f160c5b33393ff6b1bf1275bc6476ec80807d632"} Jan 27 07:51:56 crc kubenswrapper[4787]: I0127 07:51:56.229617 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"4426d84c4375c7c659c77049bbd6a720bf004fa5e7780b1e8739a2620c3667f2"} Jan 27 07:51:56 crc kubenswrapper[4787]: I0127 07:51:56.229626 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"49a5ef49fa90ba7dd3f4ea76e1cb2effd0e2212f43be1e4021cad0a3af69661c"} Jan 27 07:51:56 crc kubenswrapper[4787]: I0127 07:51:56.230821 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"7a18f3e6327858658093c392b8b7de18ec6a1085c08ae31ae2f2dc371555011a"} Jan 27 07:51:56 crc kubenswrapper[4787]: I0127 07:51:56.230840 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"89e13c9ab2974854ef597032becee5f7a47be69106c6883a052da96b137605d0"} Jan 27 07:51:56 crc kubenswrapper[4787]: I0127 07:51:56.248189 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9bef4a57-904d-47b1-baa1-224c5c9f2b1a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: 
[cluster-policy-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: [cluster-policy-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://99c0fb5499a4bed619d003da60f83076e413fc5bda47a12d42770f567deeeec8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c114c2274358e777b240765cca81ea54a2e334002317a86595b7bfec95a11fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c470d3edd878f69c48f1457780557dd56300c0fe69342c479f1922dca856bceb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bba5d04b33d62090867842211184f53bc23cf78b90a4c6709792639b74d1cf0d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-contr
oller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:35Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:51:56Z is after 2025-08-24T17:21:41Z" Jan 27 07:51:56 crc kubenswrapper[4787]: I0127 07:51:56.266277 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"60eef58e-b2eb-43d9-a499-317083a89ca3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://35e90c0899d9ae9ea7cce3f5904955f1f3a80e147efbd766e9ed3b34014f40df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://53dd18ffb0f39abccd8edc121977448f4c491f6be9acee866dde8cbeb7c52ab5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://af5452f3980a8a4614721cb60fffffb3b1f3f5d8d82928249857ed2dd3fb5986\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://879d855210b326ad3cbb964024c0b48b7373519deae8b974b9f5864416c15ef3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cacb7c35c671a1aaa2121bf6d0b1e05cd37f7a2e3531251a1b4a56d381b97526\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-27T07:51:48Z\\\",\\\"message\\\":\\\"W0127 07:51:38.260378 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0127 
07:51:38.260778 1 crypto.go:601] Generating new CA for check-endpoints-signer@1769500298 cert, and key in /tmp/serving-cert-1332914759/serving-signer.crt, /tmp/serving-cert-1332914759/serving-signer.key\\\\nI0127 07:51:38.543192 1 observer_polling.go:159] Starting file observer\\\\nW0127 07:51:38.545642 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0127 07:51:38.545783 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0127 07:51:38.547951 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1332914759/tls.crt::/tmp/serving-cert-1332914759/tls.key\\\\\\\"\\\\nF0127 07:51:48.940921 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-27T07:51:38Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://879d855210b326ad3cbb964024c0b48b7373519deae8b974b9f5864416c15ef3\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"message\\\":\\\"le observer\\\\nW0127 07:51:54.984485 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0127 07:51:54.984680 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0127 07:51:54.985504 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2155430209/tls.crt::/tmp/serving-cert-2155430209/tls.key\\\\\\\"\\\\nI0127 07:51:55.207010 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0127 07:51:55.214027 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0127 07:51:55.214056 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0127 07:51:55.214085 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0127 07:51:55.214092 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0127 07:51:55.221404 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0127 07:51:55.221608 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0127 07:51:55.221697 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0127 07:51:55.221768 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0127 07:51:55.221825 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0127 07:51:55.221877 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0127 07:51:55.221926 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0127 
07:51:55.221414 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0127 07:51:55.222961 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-27T07:51:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f3424e1f77a68fac6a8ccd12e018ce28850817c99e72cef7edbff0941124c46b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c37afd8d2562157729aca3685d7abae2bb6b9e081c2b456706dde875fd762c07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c37afd8d2562157729aca3685d7abae2bb6b9e081c2b456706dde875fd762c07\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:51:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:35Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:51:56Z is after 2025-08-24T17:21:41Z" Jan 27 07:51:56 crc kubenswrapper[4787]: I0127 07:51:56.283276 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:51:56Z is after 2025-08-24T17:21:41Z" Jan 27 07:51:56 crc kubenswrapper[4787]: I0127 07:51:56.298714 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:51:56Z is after 2025-08-24T17:21:41Z" Jan 27 07:51:56 crc kubenswrapper[4787]: I0127 07:51:56.314016 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:51:56Z is after 2025-08-24T17:21:41Z" Jan 27 07:51:56 crc kubenswrapper[4787]: I0127 07:51:56.325865 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:51:56Z is after 2025-08-24T17:21:41Z" Jan 27 07:51:56 crc kubenswrapper[4787]: I0127 07:51:56.338087 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:51:56Z is after 2025-08-24T17:21:41Z" Jan 27 07:51:56 crc kubenswrapper[4787]: I0127 07:51:56.350951 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:51:56Z is after 2025-08-24T17:21:41Z" Jan 27 07:51:56 crc kubenswrapper[4787]: I0127 07:51:56.364390 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9bef4a57-904d-47b1-baa1-224c5c9f2b1a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: [cluster-policy-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: 
[cluster-policy-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://99c0fb5499a4bed619d003da60f83076e413fc5bda47a12d42770f567deeeec8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c114c2274358e777b240765cca81ea54a2e334002317a86595b7bfec95a11fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c470d3edd878f69c48f1457780557dd56300c0fe69342c479f1922dca856bceb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bba5d04b33d62090867842211184f53bc23cf78b90a4c6709792639b74d1cf0d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\"
,\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:35Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:51:56Z is after 2025-08-24T17:21:41Z" Jan 27 07:51:56 crc kubenswrapper[4787]: I0127 07:51:56.380216 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"60eef58e-b2eb-43d9-a499-317083a89ca3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://35e90c0899d9ae9ea7cce3f5904955f1f3a80e147efbd766e9ed3b34014f40df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://53dd18ffb0f39abccd8edc121977448f4c491f6be9acee866dde8cbeb7c52ab5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://af5452f3980a8a4614721cb60fffffb3b1f3f5d8d82928249857ed2dd3fb5986\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://879d855210b326ad3cbb964024c0b48b7373519deae8b974b9f5864416c15ef3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cacb7c35c671a1aaa2121bf6d0b1e05cd37f7a2e3531251a1b4a56d381b97526\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-27T07:51:48Z\\\",\\\"message\\\":\\\"W0127 07:51:38.260378 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0127 
07:51:38.260778 1 crypto.go:601] Generating new CA for check-endpoints-signer@1769500298 cert, and key in /tmp/serving-cert-1332914759/serving-signer.crt, /tmp/serving-cert-1332914759/serving-signer.key\\\\nI0127 07:51:38.543192 1 observer_polling.go:159] Starting file observer\\\\nW0127 07:51:38.545642 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0127 07:51:38.545783 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0127 07:51:38.547951 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1332914759/tls.crt::/tmp/serving-cert-1332914759/tls.key\\\\\\\"\\\\nF0127 07:51:48.940921 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-27T07:51:38Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://879d855210b326ad3cbb964024c0b48b7373519deae8b974b9f5864416c15ef3\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"message\\\":\\\"le observer\\\\nW0127 07:51:54.984485 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0127 07:51:54.984680 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0127 07:51:54.985504 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2155430209/tls.crt::/tmp/serving-cert-2155430209/tls.key\\\\\\\"\\\\nI0127 07:51:55.207010 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0127 07:51:55.214027 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0127 07:51:55.214056 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0127 07:51:55.214085 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0127 07:51:55.214092 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0127 07:51:55.221404 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0127 07:51:55.221608 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0127 07:51:55.221697 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0127 07:51:55.221768 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0127 07:51:55.221825 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0127 07:51:55.221877 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0127 07:51:55.221926 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0127 
07:51:55.221414 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0127 07:51:55.222961 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-27T07:51:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f3424e1f77a68fac6a8ccd12e018ce28850817c99e72cef7edbff0941124c46b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c37afd8d2562157729aca3685d7abae2bb6b9e081c2b456706dde875fd762c07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c37afd8d2562157729aca3685d7abae2bb6b9e081c2b456706dde875fd762c07\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:51:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:35Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:51:56Z is after 2025-08-24T17:21:41Z" Jan 27 07:51:56 crc kubenswrapper[4787]: I0127 07:51:56.393582 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:51:56Z is after 2025-08-24T17:21:41Z" Jan 27 07:51:56 crc kubenswrapper[4787]: I0127 07:51:56.408918 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7a18f3e6327858658093c392b8b7de18ec6a1085c08ae31ae2f2dc371555011a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:51:56Z is after 2025-08-24T17:21:41Z" Jan 27 07:51:56 crc kubenswrapper[4787]: I0127 07:51:56.422235 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:51:56Z is after 2025-08-24T17:21:41Z" Jan 27 07:51:56 crc kubenswrapper[4787]: I0127 07:51:56.441657 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2974b5846d4b9e95a3eab849f160c5b33393ff6b1bf1275bc6476ec80807d632\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4426d84c4375c7c659c77049bbd6a720bf004fa5e7780b1e8739a2620c3667f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{
\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:51:56Z is after 2025-08-24T17:21:41Z" Jan 27 07:51:56 crc kubenswrapper[4787]: I0127 07:51:56.455504 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:51:56Z is after 2025-08-24T17:21:41Z" Jan 27 07:51:56 crc kubenswrapper[4787]: I0127 07:51:56.468330 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:51:56Z is after 2025-08-24T17:21:41Z" Jan 27 07:51:56 crc kubenswrapper[4787]: I0127 07:51:56.673303 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 27 07:51:56 crc kubenswrapper[4787]: I0127 07:51:56.673388 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 27 07:51:56 crc kubenswrapper[4787]: I0127 07:51:56.673421 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 27 07:51:56 crc kubenswrapper[4787]: E0127 07:51:56.673456 4787 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-27 07:51:58.673434366 +0000 UTC m=+24.325789868 (durationBeforeRetry 2s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 27 07:51:56 crc kubenswrapper[4787]: I0127 07:51:56.673487 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 27 07:51:56 crc kubenswrapper[4787]: I0127 07:51:56.673521 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 27 07:51:56 crc kubenswrapper[4787]: E0127 07:51:56.673495 4787 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Jan 27 07:51:56 crc kubenswrapper[4787]: E0127 07:51:56.673667 4787 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 27 07:51:56 crc kubenswrapper[4787]: E0127 07:51:56.673685 4787 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 27 07:51:56 crc kubenswrapper[4787]: E0127 07:51:56.673695 4787 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 27 07:51:56 crc kubenswrapper[4787]: E0127 07:51:56.673670 4787 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-27 07:51:58.673660661 +0000 UTC m=+24.326016153 (durationBeforeRetry 2s). 
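Every "Failed to update status for pod" entry above is rejected for the same underlying reason: the pod.network-node-identity.openshift.io webhook at https://127.0.0.1:9743 presents a serving certificate whose NotAfter date (2025-08-24T17:21:41Z) precedes the node's current clock (2026-01-27T07:51:56Z), so the kubelet's TLS client refuses the handshake. The Go sketch below only illustrates that validity-window check against an exported certificate file; the file name is a placeholder and this is not the kubelet's or the webhook's actual code.

```go
// Illustrative only: reproduces the NotBefore/NotAfter check behind the
// "x509: certificate has expired or is not yet valid" error in the log.
// The certificate path is a placeholder, not a path taken from the log.
package main

import (
	"crypto/x509"
	"encoding/pem"
	"fmt"
	"os"
	"time"
)

func main() {
	pemBytes, err := os.ReadFile("webhook-serving-cert.pem") // placeholder path
	if err != nil {
		panic(err)
	}
	block, _ := pem.Decode(pemBytes)
	if block == nil {
		panic("no PEM block found")
	}
	cert, err := x509.ParseCertificate(block.Bytes)
	if err != nil {
		panic(err)
	}

	now := time.Now()
	switch {
	case now.After(cert.NotAfter):
		// Matches the log wording: "current time ... is after <NotAfter>".
		fmt.Printf("expired: current time %s is after %s\n",
			now.UTC().Format(time.RFC3339), cert.NotAfter.UTC().Format(time.RFC3339))
	case now.Before(cert.NotBefore):
		fmt.Printf("not yet valid: current time %s is before %s\n",
			now.UTC().Format(time.RFC3339), cert.NotBefore.UTC().Format(time.RFC3339))
	default:
		fmt.Println("certificate is within its validity window")
	}
}
```

Until the webhook's serving certificate is rotated (or the node clock falls back inside its validity window), each of these status patches will keep failing with the same x509 error.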
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Jan 27 07:51:56 crc kubenswrapper[4787]: E0127 07:51:56.673606 4787 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 27 07:51:56 crc kubenswrapper[4787]: E0127 07:51:56.673830 4787 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 27 07:51:56 crc kubenswrapper[4787]: E0127 07:51:56.673859 4787 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 27 07:51:56 crc kubenswrapper[4787]: E0127 07:51:56.673727 4787 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-01-27 07:51:58.673718932 +0000 UTC m=+24.326074424 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 27 07:51:56 crc kubenswrapper[4787]: E0127 07:51:56.673627 4787 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 27 07:51:56 crc kubenswrapper[4787]: E0127 07:51:56.674096 4787 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-01-27 07:51:58.673979017 +0000 UTC m=+24.326334539 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 27 07:51:56 crc kubenswrapper[4787]: E0127 07:51:56.674149 4787 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-27 07:51:58.67413535 +0000 UTC m=+24.326491082 (durationBeforeRetry 2s). 
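The UnmountVolume.TearDown failure a few entries above reports that kubevirt.io.hostpath-provisioner is "not found in the list of registered CSI drivers": the CSI plugin has not yet re-registered with this kubelet after the restart, so the teardown is retried with a 2s backoff. The kubelet reflects the drivers registered on a node in that node's CSINode object, so one way to confirm the registration state is to read it. The sketch below is an illustrative client-go query, assuming an external kubeconfig path (a placeholder, not taken from the log); it is not part of the kubelet itself.

```go
// Illustrative check of which CSI drivers the kubelet has registered, read
// from the CSINode object it maintains for the node ("crc" in this log).
// The kubeconfig path is an assumption for the example.
package main

import (
	"context"
	"fmt"

	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
	"k8s.io/client-go/kubernetes"
	"k8s.io/client-go/tools/clientcmd"
)

func main() {
	cfg, err := clientcmd.BuildConfigFromFlags("", "/path/to/kubeconfig") // placeholder
	if err != nil {
		panic(err)
	}
	client, err := kubernetes.NewForConfig(cfg)
	if err != nil {
		panic(err)
	}

	csiNode, err := client.StorageV1().CSINodes().Get(context.TODO(), "crc", metav1.GetOptions{})
	if err != nil {
		panic(err)
	}
	for _, d := range csiNode.Spec.Drivers {
		// "kubevirt.io.hostpath-provisioner" should appear here once the
		// plugin has re-registered with the kubelet.
		fmt.Println("registered CSI driver:", d.Name)
	}
}
```

Once the hostpath-provisioner plugin re-registers, its name appears in CSINode.spec.drivers and the pending unmount can complete on a subsequent retry.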
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 27 07:51:57 crc kubenswrapper[4787]: I0127 07:51:57.035529 4787 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-11 10:41:58.416028264 +0000 UTC Jan 27 07:51:57 crc kubenswrapper[4787]: I0127 07:51:57.076245 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 27 07:51:57 crc kubenswrapper[4787]: I0127 07:51:57.076291 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 27 07:51:57 crc kubenswrapper[4787]: E0127 07:51:57.076389 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 27 07:51:57 crc kubenswrapper[4787]: I0127 07:51:57.076402 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 27 07:51:57 crc kubenswrapper[4787]: E0127 07:51:57.076484 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 27 07:51:57 crc kubenswrapper[4787]: E0127 07:51:57.076573 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 27 07:51:57 crc kubenswrapper[4787]: I0127 07:51:57.234976 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/1.log" Jan 27 07:51:57 crc kubenswrapper[4787]: I0127 07:51:57.238190 4787 scope.go:117] "RemoveContainer" containerID="879d855210b326ad3cbb964024c0b48b7373519deae8b974b9f5864416c15ef3" Jan 27 07:51:57 crc kubenswrapper[4787]: E0127 07:51:57.238378 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-apiserver-check-endpoints\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\"" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" Jan 27 07:51:57 crc kubenswrapper[4787]: I0127 07:51:57.252842 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9bef4a57-904d-47b1-baa1-224c5c9f2b1a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: [cluster-policy-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: 
[cluster-policy-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://99c0fb5499a4bed619d003da60f83076e413fc5bda47a12d42770f567deeeec8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c114c2274358e777b240765cca81ea54a2e334002317a86595b7bfec95a11fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c470d3edd878f69c48f1457780557dd56300c0fe69342c479f1922dca856bceb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bba5d04b33d62090867842211184f53bc23cf78b90a4c6709792639b74d1cf0d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\"
,\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:35Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:51:57Z is after 2025-08-24T17:21:41Z" Jan 27 07:51:57 crc kubenswrapper[4787]: I0127 07:51:57.276847 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"60eef58e-b2eb-43d9-a499-317083a89ca3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://35e90c0899d9ae9ea7cce3f5904955f1f3a80e147efbd766e9ed3b34014f40df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://53dd18ffb0f39abccd8edc121977448f4c491f6be9acee866dde8cbeb7c52ab5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://af5452f3980a8a4614721cb60fffffb3b1f3f5d8d82928249857ed2dd3fb5986\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://879d855210b326ad3cbb964024c0b48b7373519deae8b974b9f5864416c15ef3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://879d855210b326ad3cbb964024c0b48b7373519deae8b974b9f5864416c15ef3\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"message\\\":\\\"le observer\\\\nW0127 07:51:54.984485 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0127 07:51:54.984680 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0127 07:51:54.985504 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2155430209/tls.crt::/tmp/serving-cert-2155430209/tls.key\\\\\\\"\\\\nI0127 07:51:55.207010 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0127 07:51:55.214027 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0127 07:51:55.214056 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0127 07:51:55.214085 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0127 07:51:55.214092 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0127 07:51:55.221404 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0127 07:51:55.221608 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0127 07:51:55.221697 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0127 07:51:55.221768 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0127 07:51:55.221825 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0127 07:51:55.221877 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0127 07:51:55.221926 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0127 07:51:55.221414 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0127 07:51:55.222961 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-27T07:51:49Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f3424e1f77a68fac6a8ccd12e018ce28850817c99e72cef7edbff0941124c46b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c37afd8d2562157729aca3685d7abae2bb6b9e081c2b456706dde875fd762c07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c37afd8d2562157729aca3685d7abae2bb6b9e081c2b456706dde875fd762c07\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:51:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:35Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:51:57Z is after 2025-08-24T17:21:41Z" Jan 27 07:51:57 crc kubenswrapper[4787]: I0127 07:51:57.277982 4787 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-etcd/etcd-crc" Jan 27 07:51:57 crc kubenswrapper[4787]: I0127 07:51:57.290603 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:51:57Z is after 2025-08-24T17:21:41Z" Jan 27 07:51:57 crc kubenswrapper[4787]: I0127 07:51:57.296820 4787 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-etcd/etcd-crc" Jan 27 07:51:57 crc kubenswrapper[4787]: I0127 07:51:57.304039 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:51:57Z is after 2025-08-24T17:21:41Z" Jan 27 07:51:57 crc kubenswrapper[4787]: I0127 07:51:57.307011 4787 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd/etcd-crc"] Jan 27 07:51:57 crc kubenswrapper[4787]: I0127 07:51:57.320851 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:51:57Z is after 2025-08-24T17:21:41Z" Jan 27 07:51:57 crc kubenswrapper[4787]: I0127 07:51:57.333875 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7a18f3e6327858658093c392b8b7de18ec6a1085c08ae31ae2f2dc371555011a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:51:57Z is after 2025-08-24T17:21:41Z" Jan 27 07:51:57 crc kubenswrapper[4787]: I0127 07:51:57.347517 4787 status_manager.go:875] "Failed to update status for 
pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:51:57Z is after 2025-08-24T17:21:41Z" Jan 27 07:51:57 crc kubenswrapper[4787]: I0127 07:51:57.361538 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2974b5846d4b9e95a3eab849f160c5b33393ff6b1bf1275bc6476ec80807d632\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4426d84c4375c7c659c77049bbd6a720bf004fa5e7780b1e8739a2620c3667f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:51:57Z is after 2025-08-24T17:21:41Z" Jan 27 07:51:57 crc kubenswrapper[4787]: I0127 07:51:57.377638 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"60eef58e-b2eb-43d9-a499-317083a89ca3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://35e90c0899d9ae9ea7cce3f5904955f1f3a80e147efbd766e9ed3b34014f40df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://53dd18ffb0f39abccd8edc121977448f4c491f6be9acee866dde8cbeb7c52ab5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://af5452f3980a8a4614721cb60fffffb3b1f3f5d8d82928249857ed2dd3fb5986\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://879d855210b326ad3cbb964024c0b48b7373519deae8b974b9f5864416c15ef3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://879d855210b326ad3cbb964024c0b48b7373519deae8b974b9f5864416c15ef3\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"message\\\":\\\"le observer\\\\nW0127 07:51:54.984485 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0127 07:51:54.984680 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0127 07:51:54.985504 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2155430209/tls.crt::/tmp/serving-cert-2155430209/tls.key\\\\\\\"\\\\nI0127 07:51:55.207010 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0127 07:51:55.214027 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0127 07:51:55.214056 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0127 07:51:55.214085 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0127 07:51:55.214092 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0127 07:51:55.221404 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0127 07:51:55.221608 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0127 07:51:55.221697 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0127 07:51:55.221768 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0127 07:51:55.221825 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0127 07:51:55.221877 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0127 07:51:55.221926 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0127 07:51:55.221414 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0127 07:51:55.222961 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-27T07:51:49Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f3424e1f77a68fac6a8ccd12e018ce28850817c99e72cef7edbff0941124c46b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c37afd8d2562157729aca3685d7abae2bb6b9e081c2b456706dde875fd762c07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c37afd8d2562157729aca3685d7abae2bb6b9e081c2b456706dde875fd762c07\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:51:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:35Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:51:57Z is after 2025-08-24T17:21:41Z" Jan 27 07:51:57 crc kubenswrapper[4787]: I0127 07:51:57.394013 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:51:57Z is after 2025-08-24T17:21:41Z" Jan 27 07:51:57 crc kubenswrapper[4787]: I0127 07:51:57.415343 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9bef4a57-904d-47b1-baa1-224c5c9f2b1a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: [cluster-policy-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: 
[cluster-policy-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://99c0fb5499a4bed619d003da60f83076e413fc5bda47a12d42770f567deeeec8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c114c2274358e777b240765cca81ea54a2e334002317a86595b7bfec95a11fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c470d3edd878f69c48f1457780557dd56300c0fe69342c479f1922dca856bceb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bba5d04b33d62090867842211184f53bc23cf78b90a4c6709792639b74d1cf0d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\"
,\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:35Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:51:57Z is after 2025-08-24T17:21:41Z" Jan 27 07:51:57 crc kubenswrapper[4787]: I0127 07:51:57.433293 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7a18f3e6327858658093c392b8b7de18ec6a1085c08ae31ae2f2dc371555011a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:51:57Z is after 2025-08-24T17:21:41Z" Jan 27 07:51:57 crc kubenswrapper[4787]: I0127 07:51:57.446369 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:51:57Z is after 2025-08-24T17:21:41Z" Jan 27 07:51:57 crc kubenswrapper[4787]: I0127 07:51:57.461686 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2974b5846d4b9e95a3eab849f160c5b33393ff6b1bf1275bc6476ec80807d632\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4426d84c4375c7c659c77049bbd6a720bf004fa5e7780b1e8739a2620c3667f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:51:57Z is after 2025-08-24T17:21:41Z" Jan 27 07:51:57 crc kubenswrapper[4787]: I0127 07:51:57.474720 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:51:57Z is after 2025-08-24T17:21:41Z" Jan 27 07:51:57 crc kubenswrapper[4787]: I0127 07:51:57.488100 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"message\\\":\\\"containers with unready status: 
[iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:51:57Z is after 2025-08-24T17:21:41Z" Jan 27 07:51:57 crc kubenswrapper[4787]: I0127 07:51:57.508384 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7279787f-4f29-4eae-a213-b7ea5d579b93\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://486c1b541ff6adc6a3cdab348fbdd7fbcf1c202c1a474e5e5db37bbcfb38fe06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPat
h\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://47800c35810b19642ee37e41f20900bef93d843d1d7e2219547dde7bf88cc7dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://85684c09dd546fc9a972aa45da092b22d794a5588140a451becfbe962be82b76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0aa5c186d3ea5f41540e7450b9ee5a5bbfe8359762ab9aa219a4e4bb26fb0a94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9717409dfdeb0d26d9a1c28a651d1feddfd4aaa11a9bc7db7206fbf8c6400c22\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{
\\\"containerID\\\":\\\"cri-o://b0fb183cca1d2f6f6f5bd9c80107e85fe0f5f39ea985a036ba115836e45d7cc2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b0fb183cca1d2f6f6f5bd9c80107e85fe0f5f39ea985a036ba115836e45d7cc2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:51:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a9bb746c11b2a67e014815e16d6765a1c86a3d2c156cae83853881bdb988507c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a9bb746c11b2a67e014815e16d6765a1c86a3d2c156cae83853881bdb988507c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://1fa78137ccc23e32f6eb838abf3991ca3d99b8349721f1419c892a7f8795f55f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1fa78137ccc23e32f6eb838abf3991ca3d99b8349721f1419c892a7f8795f55f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:51:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:35Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:51:57Z is after 2025-08-24T17:21:41Z" Jan 27 07:51:58 crc kubenswrapper[4787]: I0127 07:51:58.036407 4787 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-08 07:12:05.014677271 +0000 UTC Jan 27 07:51:58 
crc kubenswrapper[4787]: I0127 07:51:58.241988 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"1d5a64cdfd5f8aff93294f92aebc9ccf8fa2a4063e347fce2e35604d27651d9a"} Jan 27 07:51:58 crc kubenswrapper[4787]: I0127 07:51:58.255618 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2974b5846d4b9e95a3eab849f160c5b33393ff6b1bf1275bc6476ec80807d632\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4426d84c4375c7c659c77049bbd6a720bf004fa5e7780b1e8739a2620c3667f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2026-01-27T07:51:58Z is after 2025-08-24T17:21:41Z" Jan 27 07:51:58 crc kubenswrapper[4787]: I0127 07:51:58.267263 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:51:58Z is after 2025-08-24T17:21:41Z" Jan 27 07:51:58 crc kubenswrapper[4787]: I0127 07:51:58.278915 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1d5a64cdfd5f8aff93294f92aebc9ccf8fa2a4063e347fce2e35604d27651d9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:51:58Z is after 2025-08-24T17:21:41Z" Jan 27 07:51:58 crc kubenswrapper[4787]: I0127 07:51:58.299058 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7279787f-4f29-4eae-a213-b7ea5d579b93\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://486c1b541ff6adc6a3cdab348fbdd7fbcf1c202c1a474e5e5db37bbcfb38fe06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://47800c35810b19642ee37e41f20900bef93d843d1d7e2219547dde7bf88cc7dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://85684c09dd546fc9a972aa45da092b22d794a5588140a451becfbe962be82b76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0aa5c186d3ea5f41540e7450b9ee5a5bbfe8359
762ab9aa219a4e4bb26fb0a94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9717409dfdeb0d26d9a1c28a651d1feddfd4aaa11a9bc7db7206fbf8c6400c22\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b0fb183cca1d2f6f6f5bd9c80107e85fe0f5f39ea985a036ba115836e45d7cc2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b0fb183cca1d2f6f6f5bd9c80107e85fe0f5f39ea985a036ba115836e45d7cc2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:51:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a9bb746c11b2a67e014815e16d6765a1c86a3d2c156cae83853881bdb988507c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a9bb746c11b2a67e014815e16d6765a1c86a3d2c156cae83853881bdb988507c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://1fa78137ccc23e32f6eb838abf3991ca3d99b8349721f1419c892a7f8795f55f\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1fa78137ccc23e32f6eb838abf3991ca3d99b8349721f1419c892a7f8795f55f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:51:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:35Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:51:58Z is after 2025-08-24T17:21:41Z" Jan 27 07:51:58 crc kubenswrapper[4787]: I0127 07:51:58.313050 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7a18f3e6327858658093c392b8b7de18ec6a1085c08ae31ae2f2dc371555011a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:51:58Z is after 2025-08-24T17:21:41Z" Jan 27 07:51:58 crc kubenswrapper[4787]: I0127 07:51:58.326683 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:51:58Z is after 2025-08-24T17:21:41Z" Jan 27 07:51:58 crc kubenswrapper[4787]: I0127 07:51:58.340050 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9bef4a57-904d-47b1-baa1-224c5c9f2b1a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: 
[cluster-policy-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: [cluster-policy-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://99c0fb5499a4bed619d003da60f83076e413fc5bda47a12d42770f567deeeec8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c114c2274358e777b240765cca81ea54a2e334002317a86595b7bfec95a11fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c470d3edd878f69c48f1457780557dd56300c0fe69342c479f1922dca856bceb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bba5d04b33d62090867842211184f53bc23cf78b90a4c6709792639b74d1cf0d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-contr
oller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:35Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:51:58Z is after 2025-08-24T17:21:41Z" Jan 27 07:51:58 crc kubenswrapper[4787]: I0127 07:51:58.355054 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"60eef58e-b2eb-43d9-a499-317083a89ca3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://35e90c0899d9ae9ea7cce3f5904955f1f3a80e147efbd766e9ed3b34014f40df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://53dd18ffb0f39abccd8edc121977448f4c491f6be9acee866dde8cbeb7c52ab5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://af5452f3980a8a4614721cb60fffffb3b1f3f5d8d82928249857ed2dd3fb5986\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://879d855210b326ad3cbb964024c0b48b7373519deae8b974b9f5864416c15ef3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://879d855210b326ad3cbb964024c0b48b7373519deae8b974b9f5864416c15ef3\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"message\\\":\\\"le observer\\\\nW0127 07:51:54.984485 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0127 07:51:54.984680 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0127 07:51:54.985504 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2155430209/tls.crt::/tmp/serving-cert-2155430209/tls.key\\\\\\\"\\\\nI0127 07:51:55.207010 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0127 07:51:55.214027 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0127 07:51:55.214056 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0127 07:51:55.214085 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0127 07:51:55.214092 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0127 07:51:55.221404 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0127 07:51:55.221608 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0127 07:51:55.221697 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0127 07:51:55.221768 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0127 07:51:55.221825 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0127 07:51:55.221877 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0127 07:51:55.221926 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0127 07:51:55.221414 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0127 07:51:55.222961 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-27T07:51:49Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f3424e1f77a68fac6a8ccd12e018ce28850817c99e72cef7edbff0941124c46b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c37afd8d2562157729aca3685d7abae2bb6b9e081c2b456706dde875fd762c07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c37afd8d2562157729aca3685d7abae2bb6b9e081c2b456706dde875fd762c07\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:51:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:35Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:51:58Z is after 2025-08-24T17:21:41Z" Jan 27 07:51:58 crc kubenswrapper[4787]: I0127 07:51:58.369105 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:51:58Z is after 2025-08-24T17:21:41Z" Jan 27 07:51:58 crc kubenswrapper[4787]: I0127 07:51:58.690113 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 27 07:51:58 crc kubenswrapper[4787]: I0127 07:51:58.690205 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 27 07:51:58 crc kubenswrapper[4787]: I0127 07:51:58.690246 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 27 07:51:58 crc kubenswrapper[4787]: E0127 07:51:58.690423 4787 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Jan 27 07:51:58 crc kubenswrapper[4787]: E0127 07:51:58.690476 4787 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 27 07:51:58 crc kubenswrapper[4787]: E0127 07:51:58.690517 4787 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 27 
07:51:58 crc kubenswrapper[4787]: E0127 07:51:58.690536 4787 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 27 07:51:58 crc kubenswrapper[4787]: E0127 07:51:58.691078 4787 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-27 07:52:02.691048629 +0000 UTC m=+28.343404121 (durationBeforeRetry 4s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 27 07:51:58 crc kubenswrapper[4787]: E0127 07:51:58.691113 4787 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-27 07:52:02.69110783 +0000 UTC m=+28.343463322 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Jan 27 07:51:58 crc kubenswrapper[4787]: E0127 07:51:58.691084 4787 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 27 07:51:58 crc kubenswrapper[4787]: E0127 07:51:58.691140 4787 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-01-27 07:52:02.691126421 +0000 UTC m=+28.343481913 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 27 07:51:58 crc kubenswrapper[4787]: E0127 07:51:58.691147 4787 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 27 07:51:58 crc kubenswrapper[4787]: E0127 07:51:58.691167 4787 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 27 07:51:58 crc kubenswrapper[4787]: E0127 07:51:58.691231 4787 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-01-27 07:52:02.691207432 +0000 UTC m=+28.343563094 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 27 07:51:58 crc kubenswrapper[4787]: I0127 07:51:58.690950 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 27 07:51:58 crc kubenswrapper[4787]: I0127 07:51:58.691326 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 27 07:51:58 crc kubenswrapper[4787]: E0127 07:51:58.691461 4787 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 27 07:51:58 crc kubenswrapper[4787]: E0127 07:51:58.691511 4787 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-27 07:52:02.691503398 +0000 UTC m=+28.343858890 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 27 07:51:58 crc kubenswrapper[4787]: I0127 07:51:58.829071 4787 csr.go:261] certificate signing request csr-rxtqq is approved, waiting to be issued Jan 27 07:51:58 crc kubenswrapper[4787]: I0127 07:51:58.841332 4787 csr.go:257] certificate signing request csr-rxtqq is issued Jan 27 07:51:59 crc kubenswrapper[4787]: I0127 07:51:59.036972 4787 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-22 07:58:00.068111652 +0000 UTC Jan 27 07:51:59 crc kubenswrapper[4787]: I0127 07:51:59.076041 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 27 07:51:59 crc kubenswrapper[4787]: I0127 07:51:59.076141 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 27 07:51:59 crc kubenswrapper[4787]: I0127 07:51:59.076055 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 27 07:51:59 crc kubenswrapper[4787]: E0127 07:51:59.076234 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 27 07:51:59 crc kubenswrapper[4787]: E0127 07:51:59.076355 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 27 07:51:59 crc kubenswrapper[4787]: E0127 07:51:59.076482 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 27 07:51:59 crc kubenswrapper[4787]: I0127 07:51:59.297199 4787 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/node-resolver-fghc7"] Jan 27 07:51:59 crc kubenswrapper[4787]: I0127 07:51:59.297661 4787 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-dns/node-resolver-fghc7" Jan 27 07:51:59 crc kubenswrapper[4787]: I0127 07:51:59.299657 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"openshift-service-ca.crt" Jan 27 07:51:59 crc kubenswrapper[4787]: I0127 07:51:59.299928 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"node-resolver-dockercfg-kz9s7" Jan 27 07:51:59 crc kubenswrapper[4787]: I0127 07:51:59.299984 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"kube-root-ca.crt" Jan 27 07:51:59 crc kubenswrapper[4787]: I0127 07:51:59.318127 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7a18f3e6327858658093c392b8b7de18ec6a1085c08ae31ae2f2dc371555011a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:51:59Z is after 2025-08-24T17:21:41Z" Jan 27 07:51:59 crc kubenswrapper[4787]: I0127 07:51:59.330903 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:51:59Z is after 2025-08-24T17:21:41Z" Jan 27 07:51:59 crc kubenswrapper[4787]: I0127 07:51:59.346756 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2974b5846d4b9e95a3eab849f160c5b33393ff6b1bf1275bc6476ec80807d632\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4426d84c4375c7c659c77049bbd6a720bf004fa5e7780b1e8739a2620c3667f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:51:59Z is after 2025-08-24T17:21:41Z" Jan 27 07:51:59 crc kubenswrapper[4787]: I0127 07:51:59.360120 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:51:59Z is after 2025-08-24T17:21:41Z" Jan 27 07:51:59 crc kubenswrapper[4787]: I0127 07:51:59.371627 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1d5a64cdfd5f8aff93294f92aebc9ccf8fa2a4063e347fce2e35604d27651d9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:51:59Z is after 2025-08-24T17:21:41Z" Jan 27 07:51:59 crc kubenswrapper[4787]: I0127 07:51:59.396994 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/6cac1bb0-6b6f-442f-9db4-a25d4f194bb1-hosts-file\") pod \"node-resolver-fghc7\" (UID: \"6cac1bb0-6b6f-442f-9db4-a25d4f194bb1\") " pod="openshift-dns/node-resolver-fghc7" Jan 27 07:51:59 crc kubenswrapper[4787]: I0127 07:51:59.397067 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c8zvj\" (UniqueName: \"kubernetes.io/projected/6cac1bb0-6b6f-442f-9db4-a25d4f194bb1-kube-api-access-c8zvj\") pod \"node-resolver-fghc7\" (UID: \"6cac1bb0-6b6f-442f-9db4-a25d4f194bb1\") " pod="openshift-dns/node-resolver-fghc7" Jan 27 07:51:59 crc kubenswrapper[4787]: I0127 07:51:59.400570 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7279787f-4f29-4eae-a213-b7ea5d579b93\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://486c1b541ff6adc6a3cdab348fbdd7fbcf1c202c1a474e5e5db37bbcfb38fe06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://47800c35810b19642ee37e41f20900bef93d843d1d7e2219547dde7bf88cc7dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://85684c09dd546fc9a972aa45da092b22d794a5588140a451becfbe962be82b76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0aa5c186d3ea5f41540e7450b9ee5a5bbfe8359
762ab9aa219a4e4bb26fb0a94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9717409dfdeb0d26d9a1c28a651d1feddfd4aaa11a9bc7db7206fbf8c6400c22\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b0fb183cca1d2f6f6f5bd9c80107e85fe0f5f39ea985a036ba115836e45d7cc2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b0fb183cca1d2f6f6f5bd9c80107e85fe0f5f39ea985a036ba115836e45d7cc2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:51:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a9bb746c11b2a67e014815e16d6765a1c86a3d2c156cae83853881bdb988507c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a9bb746c11b2a67e014815e16d6765a1c86a3d2c156cae83853881bdb988507c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://1fa78137ccc23e32f6eb838abf3991ca3d99b8349721f1419c892a7f8795f55f\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1fa78137ccc23e32f6eb838abf3991ca3d99b8349721f1419c892a7f8795f55f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:51:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:35Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:51:59Z is after 2025-08-24T17:21:41Z" Jan 27 07:51:59 crc kubenswrapper[4787]: I0127 07:51:59.421406 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"60eef58e-b2eb-43d9-a499-317083a89ca3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://35e90c0899d9ae9ea7cce3f5904955f1f3a80e147efbd766e9ed3b34014f40df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://53dd18ffb0f39abccd8edc121977448f4c491f6be9acee866dde8cbeb7c52ab5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://af5452f3980a8a4614721cb60fffffb3b1f3f5d8d82928249857ed2dd3fb5986\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://879d855210b326ad3cbb964024c0b48b7373519deae8b974b9f5864416c15ef3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://879d855210b326ad3cbb964024c0b48b7373519deae8b974b9f5864416c15ef3\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"message\\\":\\\"le observer\\\\nW0127 07:51:54.984485 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0127 07:51:54.984680 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0127 07:51:54.985504 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2155430209/tls.crt::/tmp/serving-cert-2155430209/tls.key\\\\\\\"\\\\nI0127 07:51:55.207010 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0127 07:51:55.214027 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0127 07:51:55.214056 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0127 07:51:55.214085 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0127 07:51:55.214092 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0127 07:51:55.221404 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0127 07:51:55.221608 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0127 07:51:55.221697 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0127 07:51:55.221768 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0127 07:51:55.221825 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0127 07:51:55.221877 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0127 07:51:55.221926 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0127 07:51:55.221414 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0127 07:51:55.222961 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-27T07:51:49Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f3424e1f77a68fac6a8ccd12e018ce28850817c99e72cef7edbff0941124c46b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c37afd8d2562157729aca3685d7abae2bb6b9e081c2b456706dde875fd762c07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c37afd8d2562157729aca3685d7abae2bb6b9e081c2b456706dde875fd762c07\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:51:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:35Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:51:59Z is after 2025-08-24T17:21:41Z" Jan 27 07:51:59 crc kubenswrapper[4787]: I0127 07:51:59.435676 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:51:59Z is after 2025-08-24T17:21:41Z" Jan 27 07:51:59 crc kubenswrapper[4787]: I0127 07:51:59.447487 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-fghc7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6cac1bb0-6b6f-442f-9db4-a25d4f194bb1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"message\\\":\\\"containers with unready status: 
[dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c8zvj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:59Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-fghc7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:51:59Z is after 2025-08-24T17:21:41Z" Jan 27 07:51:59 crc kubenswrapper[4787]: I0127 07:51:59.462761 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9bef4a57-904d-47b1-baa1-224c5c9f2b1a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: [cluster-policy-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: 
[cluster-policy-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://99c0fb5499a4bed619d003da60f83076e413fc5bda47a12d42770f567deeeec8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c114c2274358e777b240765cca81ea54a2e334002317a86595b7bfec95a11fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c470d3edd878f69c48f1457780557dd56300c0fe69342c479f1922dca856bceb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bba5d04b33d62090867842211184f53bc23cf78b90a4c6709792639b74d1cf0d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\"
,\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:35Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:51:59Z is after 2025-08-24T17:21:41Z" Jan 27 07:51:59 crc kubenswrapper[4787]: I0127 07:51:59.497867 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c8zvj\" (UniqueName: \"kubernetes.io/projected/6cac1bb0-6b6f-442f-9db4-a25d4f194bb1-kube-api-access-c8zvj\") pod \"node-resolver-fghc7\" (UID: \"6cac1bb0-6b6f-442f-9db4-a25d4f194bb1\") " pod="openshift-dns/node-resolver-fghc7" Jan 27 07:51:59 crc kubenswrapper[4787]: I0127 07:51:59.497958 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/6cac1bb0-6b6f-442f-9db4-a25d4f194bb1-hosts-file\") pod \"node-resolver-fghc7\" (UID: \"6cac1bb0-6b6f-442f-9db4-a25d4f194bb1\") " pod="openshift-dns/node-resolver-fghc7" Jan 27 07:51:59 crc kubenswrapper[4787]: I0127 07:51:59.498026 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/6cac1bb0-6b6f-442f-9db4-a25d4f194bb1-hosts-file\") pod \"node-resolver-fghc7\" (UID: \"6cac1bb0-6b6f-442f-9db4-a25d4f194bb1\") " pod="openshift-dns/node-resolver-fghc7" Jan 27 07:51:59 crc kubenswrapper[4787]: I0127 07:51:59.506517 4787 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 27 07:51:59 crc kubenswrapper[4787]: I0127 07:51:59.511179 4787 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 27 07:51:59 crc kubenswrapper[4787]: I0127 07:51:59.522296 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c8zvj\" (UniqueName: \"kubernetes.io/projected/6cac1bb0-6b6f-442f-9db4-a25d4f194bb1-kube-api-access-c8zvj\") pod \"node-resolver-fghc7\" (UID: \"6cac1bb0-6b6f-442f-9db4-a25d4f194bb1\") " pod="openshift-dns/node-resolver-fghc7" Jan 27 07:51:59 crc kubenswrapper[4787]: I0127 07:51:59.535361 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:51:59Z is after 2025-08-24T17:21:41Z" Jan 27 07:51:59 crc kubenswrapper[4787]: I0127 07:51:59.564913 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2974b5846d4b9e95a3eab849f160c5b33393ff6b1bf1275bc6476ec80807d632\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4426d84c4375c7c659c77049bbd6a720bf004fa5e7780b1e8739a2620c3667f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:51:59Z is after 2025-08-24T17:21:41Z" Jan 27 07:51:59 crc kubenswrapper[4787]: I0127 07:51:59.601008 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:51:59Z is after 2025-08-24T17:21:41Z" Jan 27 07:51:59 crc kubenswrapper[4787]: I0127 07:51:59.616644 4787 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-dns/node-resolver-fghc7" Jan 27 07:51:59 crc kubenswrapper[4787]: I0127 07:51:59.647602 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1d5a64cdfd5f8aff93294f92aebc9ccf8fa2a4063e347fce2e35604d27651d9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:51:59Z is after 2025-08-24T17:21:41Z" Jan 27 07:51:59 crc kubenswrapper[4787]: I0127 07:51:59.673315 4787 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-rqjpz"] Jan 27 07:51:59 crc kubenswrapper[4787]: I0127 07:51:59.673664 4787 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/multus-rqjpz" Jan 27 07:51:59 crc kubenswrapper[4787]: I0127 07:51:59.676453 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"openshift-service-ca.crt" Jan 27 07:51:59 crc kubenswrapper[4787]: I0127 07:51:59.676880 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"default-dockercfg-2q5b6" Jan 27 07:51:59 crc kubenswrapper[4787]: I0127 07:51:59.678734 4787 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-7642m"] Jan 27 07:51:59 crc kubenswrapper[4787]: I0127 07:51:59.678773 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"cni-copy-resources" Jan 27 07:51:59 crc kubenswrapper[4787]: I0127 07:51:59.678813 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"multus-daemon-config" Jan 27 07:51:59 crc kubenswrapper[4787]: I0127 07:51:59.678907 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7279787f-4f29-4eae-a213-b7ea5d579b93\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://486c1b541ff6adc6a3cdab348fbdd7fbcf1c202c1a474e5e5db37bbcfb38fe06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://47800c35810b19642ee37e41f20900bef93d843d1d7e2219547dde7bf88cc7dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:39Z\\\"}},\\\"volumeMounts\\
\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://85684c09dd546fc9a972aa45da092b22d794a5588140a451becfbe962be82b76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0aa5c186d3ea5f41540e7450b9ee5a5bbfe8359762ab9aa219a4e4bb26fb0a94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9717409dfdeb0d26d9a1c28a651d1feddfd4aaa11a9bc7db7206fbf8c6400c22\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b0fb183cca1d2f6f6f5bd9c80107e85fe0f5f39ea985a036ba115836e45d7cc2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b0fb183cca1d2f6f6f5bd9c80107e85fe0f5f39ea985a036ba115836e45d7cc2\\\",\\\"exitCode\\\":0,\\\"finishedA
t\\\":\\\"2026-01-27T07:51:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a9bb746c11b2a67e014815e16d6765a1c86a3d2c156cae83853881bdb988507c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a9bb746c11b2a67e014815e16d6765a1c86a3d2c156cae83853881bdb988507c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://1fa78137ccc23e32f6eb838abf3991ca3d99b8349721f1419c892a7f8795f55f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1fa78137ccc23e32f6eb838abf3991ca3d99b8349721f1419c892a7f8795f55f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:51:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:35Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:51:59Z is after 2025-08-24T17:21:41Z" Jan 27 07:51:59 crc kubenswrapper[4787]: I0127 07:51:59.679505 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-7642m" Jan 27 07:51:59 crc kubenswrapper[4787]: I0127 07:51:59.681080 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"kube-root-ca.crt" Jan 27 07:51:59 crc kubenswrapper[4787]: I0127 07:51:59.681910 4787 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-daemon-q4fh5"] Jan 27 07:51:59 crc kubenswrapper[4787]: I0127 07:51:59.682351 4787 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-q4fh5" Jan 27 07:51:59 crc kubenswrapper[4787]: I0127 07:51:59.683656 4787 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-additional-cni-plugins-fqb6r"] Jan 27 07:51:59 crc kubenswrapper[4787]: I0127 07:51:59.683940 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"env-overrides" Jan 27 07:51:59 crc kubenswrapper[4787]: I0127 07:51:59.684433 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt" Jan 27 07:51:59 crc kubenswrapper[4787]: I0127 07:51:59.685709 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-fqb6r" Jan 27 07:51:59 crc kubenswrapper[4787]: I0127 07:51:59.691249 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"kube-root-ca.crt" Jan 27 07:51:59 crc kubenswrapper[4787]: I0127 07:51:59.691592 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-script-lib" Jan 27 07:51:59 crc kubenswrapper[4787]: I0127 07:51:59.691988 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-rbac-proxy" Jan 27 07:51:59 crc kubenswrapper[4787]: I0127 07:51:59.692191 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"openshift-service-ca.crt" Jan 27 07:51:59 crc kubenswrapper[4787]: I0127 07:51:59.692303 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert" Jan 27 07:51:59 crc kubenswrapper[4787]: I0127 07:51:59.692527 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"default-cni-sysctl-allowlist" Jan 27 07:51:59 crc kubenswrapper[4787]: I0127 07:51:59.692692 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl" Jan 27 07:51:59 crc kubenswrapper[4787]: I0127 07:51:59.692932 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-config" Jan 27 07:51:59 crc kubenswrapper[4787]: I0127 07:51:59.693408 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"proxy-tls" Jan 27 07:51:59 crc kubenswrapper[4787]: I0127 07:51:59.693757 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-root-ca.crt" Jan 27 07:51:59 crc kubenswrapper[4787]: I0127 07:51:59.693926 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz" Jan 27 07:51:59 crc kubenswrapper[4787]: I0127 07:51:59.694081 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq" Jan 27 07:51:59 crc kubenswrapper[4787]: I0127 07:51:59.699054 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/e6f78168-0b0d-464d-b1c7-00bb9a69c0d1-host-var-lib-cni-multus\") pod \"multus-rqjpz\" (UID: \"e6f78168-0b0d-464d-b1c7-00bb9a69c0d1\") " pod="openshift-multus/multus-rqjpz" Jan 27 07:51:59 crc kubenswrapper[4787]: I0127 07:51:59.699465 4787 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/e6f78168-0b0d-464d-b1c7-00bb9a69c0d1-multus-socket-dir-parent\") pod \"multus-rqjpz\" (UID: \"e6f78168-0b0d-464d-b1c7-00bb9a69c0d1\") " pod="openshift-multus/multus-rqjpz" Jan 27 07:51:59 crc kubenswrapper[4787]: I0127 07:51:59.699591 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/e6f78168-0b0d-464d-b1c7-00bb9a69c0d1-host-run-k8s-cni-cncf-io\") pod \"multus-rqjpz\" (UID: \"e6f78168-0b0d-464d-b1c7-00bb9a69c0d1\") " pod="openshift-multus/multus-rqjpz" Jan 27 07:51:59 crc kubenswrapper[4787]: I0127 07:51:59.699680 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/e6f78168-0b0d-464d-b1c7-00bb9a69c0d1-cni-binary-copy\") pod \"multus-rqjpz\" (UID: \"e6f78168-0b0d-464d-b1c7-00bb9a69c0d1\") " pod="openshift-multus/multus-rqjpz" Jan 27 07:51:59 crc kubenswrapper[4787]: I0127 07:51:59.699749 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/e6f78168-0b0d-464d-b1c7-00bb9a69c0d1-host-run-multus-certs\") pod \"multus-rqjpz\" (UID: \"e6f78168-0b0d-464d-b1c7-00bb9a69c0d1\") " pod="openshift-multus/multus-rqjpz" Jan 27 07:51:59 crc kubenswrapper[4787]: I0127 07:51:59.699824 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/e6f78168-0b0d-464d-b1c7-00bb9a69c0d1-os-release\") pod \"multus-rqjpz\" (UID: \"e6f78168-0b0d-464d-b1c7-00bb9a69c0d1\") " pod="openshift-multus/multus-rqjpz" Jan 27 07:51:59 crc kubenswrapper[4787]: I0127 07:51:59.699897 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/e6f78168-0b0d-464d-b1c7-00bb9a69c0d1-host-var-lib-kubelet\") pod \"multus-rqjpz\" (UID: \"e6f78168-0b0d-464d-b1c7-00bb9a69c0d1\") " pod="openshift-multus/multus-rqjpz" Jan 27 07:51:59 crc kubenswrapper[4787]: I0127 07:51:59.700071 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/e6f78168-0b0d-464d-b1c7-00bb9a69c0d1-etc-kubernetes\") pod \"multus-rqjpz\" (UID: \"e6f78168-0b0d-464d-b1c7-00bb9a69c0d1\") " pod="openshift-multus/multus-rqjpz" Jan 27 07:51:59 crc kubenswrapper[4787]: I0127 07:51:59.700165 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/e6f78168-0b0d-464d-b1c7-00bb9a69c0d1-multus-daemon-config\") pod \"multus-rqjpz\" (UID: \"e6f78168-0b0d-464d-b1c7-00bb9a69c0d1\") " pod="openshift-multus/multus-rqjpz" Jan 27 07:51:59 crc kubenswrapper[4787]: I0127 07:51:59.700241 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/e6f78168-0b0d-464d-b1c7-00bb9a69c0d1-multus-cni-dir\") pod \"multus-rqjpz\" (UID: \"e6f78168-0b0d-464d-b1c7-00bb9a69c0d1\") " pod="openshift-multus/multus-rqjpz" Jan 27 07:51:59 crc kubenswrapper[4787]: I0127 07:51:59.700308 4787 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/e6f78168-0b0d-464d-b1c7-00bb9a69c0d1-host-var-lib-cni-bin\") pod \"multus-rqjpz\" (UID: \"e6f78168-0b0d-464d-b1c7-00bb9a69c0d1\") " pod="openshift-multus/multus-rqjpz" Jan 27 07:51:59 crc kubenswrapper[4787]: I0127 07:51:59.700391 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/e6f78168-0b0d-464d-b1c7-00bb9a69c0d1-hostroot\") pod \"multus-rqjpz\" (UID: \"e6f78168-0b0d-464d-b1c7-00bb9a69c0d1\") " pod="openshift-multus/multus-rqjpz" Jan 27 07:51:59 crc kubenswrapper[4787]: I0127 07:51:59.700484 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/e6f78168-0b0d-464d-b1c7-00bb9a69c0d1-multus-conf-dir\") pod \"multus-rqjpz\" (UID: \"e6f78168-0b0d-464d-b1c7-00bb9a69c0d1\") " pod="openshift-multus/multus-rqjpz" Jan 27 07:51:59 crc kubenswrapper[4787]: I0127 07:51:59.700608 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/e6f78168-0b0d-464d-b1c7-00bb9a69c0d1-system-cni-dir\") pod \"multus-rqjpz\" (UID: \"e6f78168-0b0d-464d-b1c7-00bb9a69c0d1\") " pod="openshift-multus/multus-rqjpz" Jan 27 07:51:59 crc kubenswrapper[4787]: I0127 07:51:59.700709 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/e6f78168-0b0d-464d-b1c7-00bb9a69c0d1-cnibin\") pod \"multus-rqjpz\" (UID: \"e6f78168-0b0d-464d-b1c7-00bb9a69c0d1\") " pod="openshift-multus/multus-rqjpz" Jan 27 07:51:59 crc kubenswrapper[4787]: I0127 07:51:59.703943 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/e6f78168-0b0d-464d-b1c7-00bb9a69c0d1-host-run-netns\") pod \"multus-rqjpz\" (UID: \"e6f78168-0b0d-464d-b1c7-00bb9a69c0d1\") " pod="openshift-multus/multus-rqjpz" Jan 27 07:51:59 crc kubenswrapper[4787]: I0127 07:51:59.704049 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tz7b2\" (UniqueName: \"kubernetes.io/projected/e6f78168-0b0d-464d-b1c7-00bb9a69c0d1-kube-api-access-tz7b2\") pod \"multus-rqjpz\" (UID: \"e6f78168-0b0d-464d-b1c7-00bb9a69c0d1\") " pod="openshift-multus/multus-rqjpz" Jan 27 07:51:59 crc kubenswrapper[4787]: I0127 07:51:59.711124 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7a18f3e6327858658093c392b8b7de18ec6a1085c08ae31ae2f2dc371555011a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:51:59Z is after 2025-08-24T17:21:41Z" Jan 27 07:51:59 crc kubenswrapper[4787]: I0127 07:51:59.733848 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:51:59Z is after 2025-08-24T17:21:41Z" Jan 27 07:51:59 crc kubenswrapper[4787]: I0127 07:51:59.752315 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-fghc7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6cac1bb0-6b6f-442f-9db4-a25d4f194bb1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c8zvj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:59Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-fghc7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:51:59Z is after 2025-08-24T17:21:41Z" Jan 27 07:51:59 crc kubenswrapper[4787]: I0127 07:51:59.773762 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9bef4a57-904d-47b1-baa1-224c5c9f2b1a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: [cluster-policy-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: [cluster-policy-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://99c0fb5499a4bed619d003da60f83076e413fc5bda47a12d42770f567deeeec8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c114c2274358e777b240765cca81ea54a2e334002317a86595b7bfec95a11fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c470d3edd878f69c48f1457780557dd56300c0fe69342c479f1922dca856bceb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc
18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bba5d04b33d62090867842211184f53bc23cf78b90a4c6709792639b74d1cf0d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:35Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:51:59Z is after 2025-08-24T17:21:41Z" Jan 27 07:51:59 crc kubenswrapper[4787]: I0127 07:51:59.805470 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/f051e184-acac-47cf-9e04-9df648288715-rootfs\") pod \"machine-config-daemon-q4fh5\" (UID: \"f051e184-acac-47cf-9e04-9df648288715\") " pod="openshift-machine-config-operator/machine-config-daemon-q4fh5" Jan 27 07:51:59 crc kubenswrapper[4787]: I0127 07:51:59.805871 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zvg7g\" (UniqueName: \"kubernetes.io/projected/f051e184-acac-47cf-9e04-9df648288715-kube-api-access-zvg7g\") pod \"machine-config-daemon-q4fh5\" (UID: \"f051e184-acac-47cf-9e04-9df648288715\") " pod="openshift-machine-config-operator/machine-config-daemon-q4fh5" Jan 27 07:51:59 crc kubenswrapper[4787]: I0127 07:51:59.805979 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/e6f78168-0b0d-464d-b1c7-00bb9a69c0d1-os-release\") pod \"multus-rqjpz\" (UID: \"e6f78168-0b0d-464d-b1c7-00bb9a69c0d1\") " pod="openshift-multus/multus-rqjpz" Jan 27 07:51:59 crc kubenswrapper[4787]: I0127 07:51:59.806070 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/0ff88955-7cd9-4af4-9e0e-79614a1d2994-tuning-conf-dir\") pod 
\"multus-additional-cni-plugins-fqb6r\" (UID: \"0ff88955-7cd9-4af4-9e0e-79614a1d2994\") " pod="openshift-multus/multus-additional-cni-plugins-fqb6r" Jan 27 07:51:59 crc kubenswrapper[4787]: I0127 07:51:59.806173 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/fa44405c-042c-485a-ab6c-912dcd377751-env-overrides\") pod \"ovnkube-node-7642m\" (UID: \"fa44405c-042c-485a-ab6c-912dcd377751\") " pod="openshift-ovn-kubernetes/ovnkube-node-7642m" Jan 27 07:51:59 crc kubenswrapper[4787]: I0127 07:51:59.806241 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/0ff88955-7cd9-4af4-9e0e-79614a1d2994-cnibin\") pod \"multus-additional-cni-plugins-fqb6r\" (UID: \"0ff88955-7cd9-4af4-9e0e-79614a1d2994\") " pod="openshift-multus/multus-additional-cni-plugins-fqb6r" Jan 27 07:51:59 crc kubenswrapper[4787]: I0127 07:51:59.806322 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/e6f78168-0b0d-464d-b1c7-00bb9a69c0d1-multus-cni-dir\") pod \"multus-rqjpz\" (UID: \"e6f78168-0b0d-464d-b1c7-00bb9a69c0d1\") " pod="openshift-multus/multus-rqjpz" Jan 27 07:51:59 crc kubenswrapper[4787]: I0127 07:51:59.806402 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/e6f78168-0b0d-464d-b1c7-00bb9a69c0d1-multus-conf-dir\") pod \"multus-rqjpz\" (UID: \"e6f78168-0b0d-464d-b1c7-00bb9a69c0d1\") " pod="openshift-multus/multus-rqjpz" Jan 27 07:51:59 crc kubenswrapper[4787]: I0127 07:51:59.806472 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/f051e184-acac-47cf-9e04-9df648288715-proxy-tls\") pod \"machine-config-daemon-q4fh5\" (UID: \"f051e184-acac-47cf-9e04-9df648288715\") " pod="openshift-machine-config-operator/machine-config-daemon-q4fh5" Jan 27 07:51:59 crc kubenswrapper[4787]: I0127 07:51:59.806621 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/fa44405c-042c-485a-ab6c-912dcd377751-host-run-netns\") pod \"ovnkube-node-7642m\" (UID: \"fa44405c-042c-485a-ab6c-912dcd377751\") " pod="openshift-ovn-kubernetes/ovnkube-node-7642m" Jan 27 07:51:59 crc kubenswrapper[4787]: I0127 07:51:59.806714 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/e6f78168-0b0d-464d-b1c7-00bb9a69c0d1-system-cni-dir\") pod \"multus-rqjpz\" (UID: \"e6f78168-0b0d-464d-b1c7-00bb9a69c0d1\") " pod="openshift-multus/multus-rqjpz" Jan 27 07:51:59 crc kubenswrapper[4787]: I0127 07:51:59.806939 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/e6f78168-0b0d-464d-b1c7-00bb9a69c0d1-cnibin\") pod \"multus-rqjpz\" (UID: \"e6f78168-0b0d-464d-b1c7-00bb9a69c0d1\") " pod="openshift-multus/multus-rqjpz" Jan 27 07:51:59 crc kubenswrapper[4787]: I0127 07:51:59.807070 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tj4tn\" (UniqueName: \"kubernetes.io/projected/fa44405c-042c-485a-ab6c-912dcd377751-kube-api-access-tj4tn\") pod \"ovnkube-node-7642m\" (UID: 
\"fa44405c-042c-485a-ab6c-912dcd377751\") " pod="openshift-ovn-kubernetes/ovnkube-node-7642m" Jan 27 07:51:59 crc kubenswrapper[4787]: I0127 07:51:59.807143 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/fa44405c-042c-485a-ab6c-912dcd377751-systemd-units\") pod \"ovnkube-node-7642m\" (UID: \"fa44405c-042c-485a-ab6c-912dcd377751\") " pod="openshift-ovn-kubernetes/ovnkube-node-7642m" Jan 27 07:51:59 crc kubenswrapper[4787]: I0127 07:51:59.807216 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/fa44405c-042c-485a-ab6c-912dcd377751-etc-openvswitch\") pod \"ovnkube-node-7642m\" (UID: \"fa44405c-042c-485a-ab6c-912dcd377751\") " pod="openshift-ovn-kubernetes/ovnkube-node-7642m" Jan 27 07:51:59 crc kubenswrapper[4787]: I0127 07:51:59.807287 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/fa44405c-042c-485a-ab6c-912dcd377751-run-openvswitch\") pod \"ovnkube-node-7642m\" (UID: \"fa44405c-042c-485a-ab6c-912dcd377751\") " pod="openshift-ovn-kubernetes/ovnkube-node-7642m" Jan 27 07:51:59 crc kubenswrapper[4787]: I0127 07:51:59.807360 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/fa44405c-042c-485a-ab6c-912dcd377751-log-socket\") pod \"ovnkube-node-7642m\" (UID: \"fa44405c-042c-485a-ab6c-912dcd377751\") " pod="openshift-ovn-kubernetes/ovnkube-node-7642m" Jan 27 07:51:59 crc kubenswrapper[4787]: I0127 07:51:59.807458 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/fa44405c-042c-485a-ab6c-912dcd377751-run-systemd\") pod \"ovnkube-node-7642m\" (UID: \"fa44405c-042c-485a-ab6c-912dcd377751\") " pod="openshift-ovn-kubernetes/ovnkube-node-7642m" Jan 27 07:51:59 crc kubenswrapper[4787]: I0127 07:51:59.807526 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/fa44405c-042c-485a-ab6c-912dcd377751-run-ovn\") pod \"ovnkube-node-7642m\" (UID: \"fa44405c-042c-485a-ab6c-912dcd377751\") " pod="openshift-ovn-kubernetes/ovnkube-node-7642m" Jan 27 07:51:59 crc kubenswrapper[4787]: I0127 07:51:59.807613 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/fa44405c-042c-485a-ab6c-912dcd377751-ovn-node-metrics-cert\") pod \"ovnkube-node-7642m\" (UID: \"fa44405c-042c-485a-ab6c-912dcd377751\") " pod="openshift-ovn-kubernetes/ovnkube-node-7642m" Jan 27 07:51:59 crc kubenswrapper[4787]: I0127 07:51:59.807697 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/e6f78168-0b0d-464d-b1c7-00bb9a69c0d1-multus-socket-dir-parent\") pod \"multus-rqjpz\" (UID: \"e6f78168-0b0d-464d-b1c7-00bb9a69c0d1\") " pod="openshift-multus/multus-rqjpz" Jan 27 07:51:59 crc kubenswrapper[4787]: I0127 07:51:59.807763 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nwnwn\" (UniqueName: 
\"kubernetes.io/projected/0ff88955-7cd9-4af4-9e0e-79614a1d2994-kube-api-access-nwnwn\") pod \"multus-additional-cni-plugins-fqb6r\" (UID: \"0ff88955-7cd9-4af4-9e0e-79614a1d2994\") " pod="openshift-multus/multus-additional-cni-plugins-fqb6r" Jan 27 07:51:59 crc kubenswrapper[4787]: I0127 07:51:59.807829 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/fa44405c-042c-485a-ab6c-912dcd377751-node-log\") pod \"ovnkube-node-7642m\" (UID: \"fa44405c-042c-485a-ab6c-912dcd377751\") " pod="openshift-ovn-kubernetes/ovnkube-node-7642m" Jan 27 07:51:59 crc kubenswrapper[4787]: I0127 07:51:59.807901 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/e6f78168-0b0d-464d-b1c7-00bb9a69c0d1-cni-binary-copy\") pod \"multus-rqjpz\" (UID: \"e6f78168-0b0d-464d-b1c7-00bb9a69c0d1\") " pod="openshift-multus/multus-rqjpz" Jan 27 07:51:59 crc kubenswrapper[4787]: I0127 07:51:59.807964 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/e6f78168-0b0d-464d-b1c7-00bb9a69c0d1-host-run-multus-certs\") pod \"multus-rqjpz\" (UID: \"e6f78168-0b0d-464d-b1c7-00bb9a69c0d1\") " pod="openshift-multus/multus-rqjpz" Jan 27 07:51:59 crc kubenswrapper[4787]: I0127 07:51:59.808049 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/f051e184-acac-47cf-9e04-9df648288715-mcd-auth-proxy-config\") pod \"machine-config-daemon-q4fh5\" (UID: \"f051e184-acac-47cf-9e04-9df648288715\") " pod="openshift-machine-config-operator/machine-config-daemon-q4fh5" Jan 27 07:51:59 crc kubenswrapper[4787]: I0127 07:51:59.808113 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/fa44405c-042c-485a-ab6c-912dcd377751-host-slash\") pod \"ovnkube-node-7642m\" (UID: \"fa44405c-042c-485a-ab6c-912dcd377751\") " pod="openshift-ovn-kubernetes/ovnkube-node-7642m" Jan 27 07:51:59 crc kubenswrapper[4787]: I0127 07:51:59.806811 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/e6f78168-0b0d-464d-b1c7-00bb9a69c0d1-multus-cni-dir\") pod \"multus-rqjpz\" (UID: \"e6f78168-0b0d-464d-b1c7-00bb9a69c0d1\") " pod="openshift-multus/multus-rqjpz" Jan 27 07:51:59 crc kubenswrapper[4787]: I0127 07:51:59.806597 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/e6f78168-0b0d-464d-b1c7-00bb9a69c0d1-multus-conf-dir\") pod \"multus-rqjpz\" (UID: \"e6f78168-0b0d-464d-b1c7-00bb9a69c0d1\") " pod="openshift-multus/multus-rqjpz" Jan 27 07:51:59 crc kubenswrapper[4787]: I0127 07:51:59.806908 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/e6f78168-0b0d-464d-b1c7-00bb9a69c0d1-system-cni-dir\") pod \"multus-rqjpz\" (UID: \"e6f78168-0b0d-464d-b1c7-00bb9a69c0d1\") " pod="openshift-multus/multus-rqjpz" Jan 27 07:51:59 crc kubenswrapper[4787]: I0127 07:51:59.806276 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/e6f78168-0b0d-464d-b1c7-00bb9a69c0d1-os-release\") pod \"multus-rqjpz\" (UID: 
\"e6f78168-0b0d-464d-b1c7-00bb9a69c0d1\") " pod="openshift-multus/multus-rqjpz" Jan 27 07:51:59 crc kubenswrapper[4787]: I0127 07:51:59.807048 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/e6f78168-0b0d-464d-b1c7-00bb9a69c0d1-cnibin\") pod \"multus-rqjpz\" (UID: \"e6f78168-0b0d-464d-b1c7-00bb9a69c0d1\") " pod="openshift-multus/multus-rqjpz" Jan 27 07:51:59 crc kubenswrapper[4787]: I0127 07:51:59.808302 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/e6f78168-0b0d-464d-b1c7-00bb9a69c0d1-multus-socket-dir-parent\") pod \"multus-rqjpz\" (UID: \"e6f78168-0b0d-464d-b1c7-00bb9a69c0d1\") " pod="openshift-multus/multus-rqjpz" Jan 27 07:51:59 crc kubenswrapper[4787]: I0127 07:51:59.808393 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/fa44405c-042c-485a-ab6c-912dcd377751-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-7642m\" (UID: \"fa44405c-042c-485a-ab6c-912dcd377751\") " pod="openshift-ovn-kubernetes/ovnkube-node-7642m" Jan 27 07:51:59 crc kubenswrapper[4787]: I0127 07:51:59.808487 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/0ff88955-7cd9-4af4-9e0e-79614a1d2994-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-fqb6r\" (UID: \"0ff88955-7cd9-4af4-9e0e-79614a1d2994\") " pod="openshift-multus/multus-additional-cni-plugins-fqb6r" Jan 27 07:51:59 crc kubenswrapper[4787]: I0127 07:51:59.808576 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/e6f78168-0b0d-464d-b1c7-00bb9a69c0d1-host-var-lib-kubelet\") pod \"multus-rqjpz\" (UID: \"e6f78168-0b0d-464d-b1c7-00bb9a69c0d1\") " pod="openshift-multus/multus-rqjpz" Jan 27 07:51:59 crc kubenswrapper[4787]: I0127 07:51:59.808645 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/e6f78168-0b0d-464d-b1c7-00bb9a69c0d1-etc-kubernetes\") pod \"multus-rqjpz\" (UID: \"e6f78168-0b0d-464d-b1c7-00bb9a69c0d1\") " pod="openshift-multus/multus-rqjpz" Jan 27 07:51:59 crc kubenswrapper[4787]: I0127 07:51:59.808707 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/0ff88955-7cd9-4af4-9e0e-79614a1d2994-os-release\") pod \"multus-additional-cni-plugins-fqb6r\" (UID: \"0ff88955-7cd9-4af4-9e0e-79614a1d2994\") " pod="openshift-multus/multus-additional-cni-plugins-fqb6r" Jan 27 07:51:59 crc kubenswrapper[4787]: I0127 07:51:59.808785 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/fa44405c-042c-485a-ab6c-912dcd377751-ovnkube-script-lib\") pod \"ovnkube-node-7642m\" (UID: \"fa44405c-042c-485a-ab6c-912dcd377751\") " pod="openshift-ovn-kubernetes/ovnkube-node-7642m" Jan 27 07:51:59 crc kubenswrapper[4787]: I0127 07:51:59.808856 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/e6f78168-0b0d-464d-b1c7-00bb9a69c0d1-host-var-lib-cni-bin\") pod \"multus-rqjpz\" 
(UID: \"e6f78168-0b0d-464d-b1c7-00bb9a69c0d1\") " pod="openshift-multus/multus-rqjpz" Jan 27 07:51:59 crc kubenswrapper[4787]: I0127 07:51:59.808926 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/e6f78168-0b0d-464d-b1c7-00bb9a69c0d1-hostroot\") pod \"multus-rqjpz\" (UID: \"e6f78168-0b0d-464d-b1c7-00bb9a69c0d1\") " pod="openshift-multus/multus-rqjpz" Jan 27 07:51:59 crc kubenswrapper[4787]: I0127 07:51:59.808991 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/e6f78168-0b0d-464d-b1c7-00bb9a69c0d1-multus-daemon-config\") pod \"multus-rqjpz\" (UID: \"e6f78168-0b0d-464d-b1c7-00bb9a69c0d1\") " pod="openshift-multus/multus-rqjpz" Jan 27 07:51:59 crc kubenswrapper[4787]: I0127 07:51:59.809045 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/e6f78168-0b0d-464d-b1c7-00bb9a69c0d1-cni-binary-copy\") pod \"multus-rqjpz\" (UID: \"e6f78168-0b0d-464d-b1c7-00bb9a69c0d1\") " pod="openshift-multus/multus-rqjpz" Jan 27 07:51:59 crc kubenswrapper[4787]: I0127 07:51:59.809096 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/e6f78168-0b0d-464d-b1c7-00bb9a69c0d1-host-run-multus-certs\") pod \"multus-rqjpz\" (UID: \"e6f78168-0b0d-464d-b1c7-00bb9a69c0d1\") " pod="openshift-multus/multus-rqjpz" Jan 27 07:51:59 crc kubenswrapper[4787]: I0127 07:51:59.809156 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/fa44405c-042c-485a-ab6c-912dcd377751-var-lib-openvswitch\") pod \"ovnkube-node-7642m\" (UID: \"fa44405c-042c-485a-ab6c-912dcd377751\") " pod="openshift-ovn-kubernetes/ovnkube-node-7642m" Jan 27 07:51:59 crc kubenswrapper[4787]: I0127 07:51:59.809245 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/fa44405c-042c-485a-ab6c-912dcd377751-ovnkube-config\") pod \"ovnkube-node-7642m\" (UID: \"fa44405c-042c-485a-ab6c-912dcd377751\") " pod="openshift-ovn-kubernetes/ovnkube-node-7642m" Jan 27 07:51:59 crc kubenswrapper[4787]: I0127 07:51:59.809323 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/e6f78168-0b0d-464d-b1c7-00bb9a69c0d1-host-run-netns\") pod \"multus-rqjpz\" (UID: \"e6f78168-0b0d-464d-b1c7-00bb9a69c0d1\") " pod="openshift-multus/multus-rqjpz" Jan 27 07:51:59 crc kubenswrapper[4787]: I0127 07:51:59.810016 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tz7b2\" (UniqueName: \"kubernetes.io/projected/e6f78168-0b0d-464d-b1c7-00bb9a69c0d1-kube-api-access-tz7b2\") pod \"multus-rqjpz\" (UID: \"e6f78168-0b0d-464d-b1c7-00bb9a69c0d1\") " pod="openshift-multus/multus-rqjpz" Jan 27 07:51:59 crc kubenswrapper[4787]: I0127 07:51:59.810139 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/fa44405c-042c-485a-ab6c-912dcd377751-host-kubelet\") pod \"ovnkube-node-7642m\" (UID: \"fa44405c-042c-485a-ab6c-912dcd377751\") " pod="openshift-ovn-kubernetes/ovnkube-node-7642m" Jan 27 07:51:59 crc kubenswrapper[4787]: I0127 
07:51:59.810233 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/0ff88955-7cd9-4af4-9e0e-79614a1d2994-cni-binary-copy\") pod \"multus-additional-cni-plugins-fqb6r\" (UID: \"0ff88955-7cd9-4af4-9e0e-79614a1d2994\") " pod="openshift-multus/multus-additional-cni-plugins-fqb6r" Jan 27 07:51:59 crc kubenswrapper[4787]: I0127 07:51:59.810332 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/0ff88955-7cd9-4af4-9e0e-79614a1d2994-system-cni-dir\") pod \"multus-additional-cni-plugins-fqb6r\" (UID: \"0ff88955-7cd9-4af4-9e0e-79614a1d2994\") " pod="openshift-multus/multus-additional-cni-plugins-fqb6r" Jan 27 07:51:59 crc kubenswrapper[4787]: I0127 07:51:59.810504 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/e6f78168-0b0d-464d-b1c7-00bb9a69c0d1-host-var-lib-cni-multus\") pod \"multus-rqjpz\" (UID: \"e6f78168-0b0d-464d-b1c7-00bb9a69c0d1\") " pod="openshift-multus/multus-rqjpz" Jan 27 07:51:59 crc kubenswrapper[4787]: I0127 07:51:59.810638 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/fa44405c-042c-485a-ab6c-912dcd377751-host-run-ovn-kubernetes\") pod \"ovnkube-node-7642m\" (UID: \"fa44405c-042c-485a-ab6c-912dcd377751\") " pod="openshift-ovn-kubernetes/ovnkube-node-7642m" Jan 27 07:51:59 crc kubenswrapper[4787]: I0127 07:51:59.810991 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/e6f78168-0b0d-464d-b1c7-00bb9a69c0d1-host-run-k8s-cni-cncf-io\") pod \"multus-rqjpz\" (UID: \"e6f78168-0b0d-464d-b1c7-00bb9a69c0d1\") " pod="openshift-multus/multus-rqjpz" Jan 27 07:51:59 crc kubenswrapper[4787]: I0127 07:51:59.811329 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/fa44405c-042c-485a-ab6c-912dcd377751-host-cni-bin\") pod \"ovnkube-node-7642m\" (UID: \"fa44405c-042c-485a-ab6c-912dcd377751\") " pod="openshift-ovn-kubernetes/ovnkube-node-7642m" Jan 27 07:51:59 crc kubenswrapper[4787]: I0127 07:51:59.811470 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/fa44405c-042c-485a-ab6c-912dcd377751-host-cni-netd\") pod \"ovnkube-node-7642m\" (UID: \"fa44405c-042c-485a-ab6c-912dcd377751\") " pod="openshift-ovn-kubernetes/ovnkube-node-7642m" Jan 27 07:51:59 crc kubenswrapper[4787]: I0127 07:51:59.809454 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/e6f78168-0b0d-464d-b1c7-00bb9a69c0d1-etc-kubernetes\") pod \"multus-rqjpz\" (UID: \"e6f78168-0b0d-464d-b1c7-00bb9a69c0d1\") " pod="openshift-multus/multus-rqjpz" Jan 27 07:51:59 crc kubenswrapper[4787]: I0127 07:51:59.809468 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/e6f78168-0b0d-464d-b1c7-00bb9a69c0d1-hostroot\") pod \"multus-rqjpz\" (UID: \"e6f78168-0b0d-464d-b1c7-00bb9a69c0d1\") " pod="openshift-multus/multus-rqjpz" Jan 27 07:51:59 crc kubenswrapper[4787]: I0127 
07:51:59.809507 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/e6f78168-0b0d-464d-b1c7-00bb9a69c0d1-host-var-lib-cni-bin\") pod \"multus-rqjpz\" (UID: \"e6f78168-0b0d-464d-b1c7-00bb9a69c0d1\") " pod="openshift-multus/multus-rqjpz" Jan 27 07:51:59 crc kubenswrapper[4787]: I0127 07:51:59.809533 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/e6f78168-0b0d-464d-b1c7-00bb9a69c0d1-host-run-netns\") pod \"multus-rqjpz\" (UID: \"e6f78168-0b0d-464d-b1c7-00bb9a69c0d1\") " pod="openshift-multus/multus-rqjpz" Jan 27 07:51:59 crc kubenswrapper[4787]: I0127 07:51:59.810736 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/e6f78168-0b0d-464d-b1c7-00bb9a69c0d1-host-var-lib-cni-multus\") pod \"multus-rqjpz\" (UID: \"e6f78168-0b0d-464d-b1c7-00bb9a69c0d1\") " pod="openshift-multus/multus-rqjpz" Jan 27 07:51:59 crc kubenswrapper[4787]: I0127 07:51:59.811033 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/e6f78168-0b0d-464d-b1c7-00bb9a69c0d1-host-run-k8s-cni-cncf-io\") pod \"multus-rqjpz\" (UID: \"e6f78168-0b0d-464d-b1c7-00bb9a69c0d1\") " pod="openshift-multus/multus-rqjpz" Jan 27 07:51:59 crc kubenswrapper[4787]: I0127 07:51:59.811062 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/e6f78168-0b0d-464d-b1c7-00bb9a69c0d1-host-var-lib-kubelet\") pod \"multus-rqjpz\" (UID: \"e6f78168-0b0d-464d-b1c7-00bb9a69c0d1\") " pod="openshift-multus/multus-rqjpz" Jan 27 07:51:59 crc kubenswrapper[4787]: I0127 07:51:59.808050 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"60eef58e-b2eb-43d9-a499-317083a89ca3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://35e90c0899d9ae9ea7cce3f5904955f1f3a80e147efbd766e9ed3b34014f40df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://53dd18ffb0f39abccd8edc121977448f4c491f6be9acee866dde8cbeb7c52ab5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://af5452f3980a8a4614721cb60fffffb3b1f3f5d8d82928249857ed2dd3fb5986\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://879d855210b326ad3cbb964024c0b48b7373519deae8b974b9f5864416c15ef3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://879d855210b326ad3cbb964024c0b48b7373519deae8b974b9f5864416c15ef3\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"message\\\":\\\"le observer\\\\nW0127 07:51:54.984485 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0127 07:51:54.984680 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0127 07:51:54.985504 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2155430209/tls.crt::/tmp/serving-cert-2155430209/tls.key\\\\\\\"\\\\nI0127 07:51:55.207010 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0127 07:51:55.214027 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0127 07:51:55.214056 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0127 07:51:55.214085 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0127 07:51:55.214092 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0127 07:51:55.221404 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0127 07:51:55.221608 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0127 07:51:55.221697 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0127 07:51:55.221768 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0127 07:51:55.221825 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0127 07:51:55.221877 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0127 07:51:55.221926 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0127 07:51:55.221414 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0127 07:51:55.222961 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-27T07:51:49Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f3424e1f77a68fac6a8ccd12e018ce28850817c99e72cef7edbff0941124c46b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c37afd8d2562157729aca3685d7abae2bb6b9e081c2b456706dde875fd762c07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c37afd8d2562157729aca3685d7abae2bb6b9e081c2b456706dde875fd762c07\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:51:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:35Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:51:59Z is after 2025-08-24T17:21:41Z" Jan 27 07:51:59 crc kubenswrapper[4787]: I0127 07:51:59.809980 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/e6f78168-0b0d-464d-b1c7-00bb9a69c0d1-multus-daemon-config\") pod \"multus-rqjpz\" (UID: \"e6f78168-0b0d-464d-b1c7-00bb9a69c0d1\") " pod="openshift-multus/multus-rqjpz" Jan 27 07:51:59 crc kubenswrapper[4787]: I0127 07:51:59.834450 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tz7b2\" (UniqueName: \"kubernetes.io/projected/e6f78168-0b0d-464d-b1c7-00bb9a69c0d1-kube-api-access-tz7b2\") pod \"multus-rqjpz\" (UID: \"e6f78168-0b0d-464d-b1c7-00bb9a69c0d1\") " pod="openshift-multus/multus-rqjpz" Jan 27 07:51:59 crc kubenswrapper[4787]: I0127 07:51:59.840817 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9bef4a57-904d-47b1-baa1-224c5c9f2b1a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://99c0fb5499a4bed619d003da60f83076e413fc5bda47a12d42770f567deeeec8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c114c2274358e777b240765cca81ea54a2e334002317a86595b7bfec95a11fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c470d3edd878f69c48f1457780557dd56300c0fe69342c479f1922dca856bceb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bba5d04b33d62090867842211184f53bc23cf78b90a4c6709792639b74d1cf0d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:35Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:51:59Z is after 2025-08-24T17:21:41Z" Jan 27 07:51:59 crc kubenswrapper[4787]: I0127 07:51:59.842868 4787 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate expiration is 2027-01-27 07:46:58 +0000 UTC, rotation deadline is 2026-12-08 21:27:31.159508378 +0000 UTC Jan 27 07:51:59 crc kubenswrapper[4787]: I0127 07:51:59.842942 4787 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Waiting 7573h35m31.316569976s for next certificate rotation Jan 27 07:51:59 crc kubenswrapper[4787]: I0127 07:51:59.857582 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"60eef58e-b2eb-43d9-a499-317083a89ca3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://35e90c0899d9ae9ea7cce3f5904955f1f3a80e147efbd766e9ed3b34014f40df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://53dd18ffb0f39abccd8edc121977448f4c491f6be9acee866dde8cbeb7c52ab5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://af5452f3980a8a4614721cb60fffffb3b1f3f5d8d82928249857ed2dd3fb5986\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://879d855210b326ad3cbb964024c0b48b7373519deae8b974b9f5864416c15ef3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://879d855210b326ad3cbb964024c0b48b7373519deae8b974b9f5864416c15ef3\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"message\\\":\\\"le observer\\\\nW0127 07:51:54.984485 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0127 07:51:54.984680 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0127 07:51:54.985504 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2155430209/tls.crt::/tmp/serving-cert-2155430209/tls.key\\\\\\\"\\\\nI0127 07:51:55.207010 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0127 07:51:55.214027 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0127 07:51:55.214056 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0127 07:51:55.214085 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0127 07:51:55.214092 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0127 07:51:55.221404 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0127 07:51:55.221608 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0127 07:51:55.221697 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0127 07:51:55.221768 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0127 07:51:55.221825 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0127 07:51:55.221877 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0127 07:51:55.221926 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0127 07:51:55.221414 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0127 07:51:55.222961 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-27T07:51:49Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f3424e1f77a68fac6a8ccd12e018ce28850817c99e72cef7edbff0941124c46b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c37afd8d2562157729aca3685d7abae2bb6b9e081c2b456706dde875fd762c07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c37afd8d2562157729aca3685d7abae2bb6b9e081c2b456706dde875fd762c07\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:51:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:35Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:51:59Z is after 2025-08-24T17:21:41Z" Jan 27 07:51:59 crc kubenswrapper[4787]: I0127 07:51:59.882145 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:51:59Z is after 2025-08-24T17:21:41Z" Jan 27 07:51:59 crc kubenswrapper[4787]: I0127 07:51:59.900452 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-fghc7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6cac1bb0-6b6f-442f-9db4-a25d4f194bb1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"message\\\":\\\"containers with unready status: 
[dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c8zvj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:59Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-fghc7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:51:59Z is after 2025-08-24T17:21:41Z" Jan 27 07:51:59 crc kubenswrapper[4787]: I0127 07:51:59.912903 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/f051e184-acac-47cf-9e04-9df648288715-rootfs\") pod \"machine-config-daemon-q4fh5\" (UID: \"f051e184-acac-47cf-9e04-9df648288715\") " pod="openshift-machine-config-operator/machine-config-daemon-q4fh5" Jan 27 07:51:59 crc kubenswrapper[4787]: I0127 07:51:59.912945 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zvg7g\" (UniqueName: \"kubernetes.io/projected/f051e184-acac-47cf-9e04-9df648288715-kube-api-access-zvg7g\") pod \"machine-config-daemon-q4fh5\" (UID: \"f051e184-acac-47cf-9e04-9df648288715\") " pod="openshift-machine-config-operator/machine-config-daemon-q4fh5" Jan 27 07:51:59 crc kubenswrapper[4787]: I0127 07:51:59.912967 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/0ff88955-7cd9-4af4-9e0e-79614a1d2994-tuning-conf-dir\") pod \"multus-additional-cni-plugins-fqb6r\" (UID: \"0ff88955-7cd9-4af4-9e0e-79614a1d2994\") " pod="openshift-multus/multus-additional-cni-plugins-fqb6r" Jan 27 07:51:59 crc kubenswrapper[4787]: I0127 07:51:59.912985 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/0ff88955-7cd9-4af4-9e0e-79614a1d2994-cnibin\") pod \"multus-additional-cni-plugins-fqb6r\" (UID: \"0ff88955-7cd9-4af4-9e0e-79614a1d2994\") " pod="openshift-multus/multus-additional-cni-plugins-fqb6r" Jan 27 07:51:59 crc kubenswrapper[4787]: I0127 07:51:59.913003 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/f051e184-acac-47cf-9e04-9df648288715-proxy-tls\") pod \"machine-config-daemon-q4fh5\" (UID: \"f051e184-acac-47cf-9e04-9df648288715\") " pod="openshift-machine-config-operator/machine-config-daemon-q4fh5" Jan 27 07:51:59 crc kubenswrapper[4787]: I0127 07:51:59.913020 4787 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/fa44405c-042c-485a-ab6c-912dcd377751-host-run-netns\") pod \"ovnkube-node-7642m\" (UID: \"fa44405c-042c-485a-ab6c-912dcd377751\") " pod="openshift-ovn-kubernetes/ovnkube-node-7642m" Jan 27 07:51:59 crc kubenswrapper[4787]: I0127 07:51:59.913035 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/fa44405c-042c-485a-ab6c-912dcd377751-env-overrides\") pod \"ovnkube-node-7642m\" (UID: \"fa44405c-042c-485a-ab6c-912dcd377751\") " pod="openshift-ovn-kubernetes/ovnkube-node-7642m" Jan 27 07:51:59 crc kubenswrapper[4787]: I0127 07:51:59.913057 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tj4tn\" (UniqueName: \"kubernetes.io/projected/fa44405c-042c-485a-ab6c-912dcd377751-kube-api-access-tj4tn\") pod \"ovnkube-node-7642m\" (UID: \"fa44405c-042c-485a-ab6c-912dcd377751\") " pod="openshift-ovn-kubernetes/ovnkube-node-7642m" Jan 27 07:51:59 crc kubenswrapper[4787]: I0127 07:51:59.913076 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/fa44405c-042c-485a-ab6c-912dcd377751-systemd-units\") pod \"ovnkube-node-7642m\" (UID: \"fa44405c-042c-485a-ab6c-912dcd377751\") " pod="openshift-ovn-kubernetes/ovnkube-node-7642m" Jan 27 07:51:59 crc kubenswrapper[4787]: I0127 07:51:59.913091 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/fa44405c-042c-485a-ab6c-912dcd377751-etc-openvswitch\") pod \"ovnkube-node-7642m\" (UID: \"fa44405c-042c-485a-ab6c-912dcd377751\") " pod="openshift-ovn-kubernetes/ovnkube-node-7642m" Jan 27 07:51:59 crc kubenswrapper[4787]: I0127 07:51:59.913109 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/fa44405c-042c-485a-ab6c-912dcd377751-run-openvswitch\") pod \"ovnkube-node-7642m\" (UID: \"fa44405c-042c-485a-ab6c-912dcd377751\") " pod="openshift-ovn-kubernetes/ovnkube-node-7642m" Jan 27 07:51:59 crc kubenswrapper[4787]: I0127 07:51:59.913098 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/f051e184-acac-47cf-9e04-9df648288715-rootfs\") pod \"machine-config-daemon-q4fh5\" (UID: \"f051e184-acac-47cf-9e04-9df648288715\") " pod="openshift-machine-config-operator/machine-config-daemon-q4fh5" Jan 27 07:51:59 crc kubenswrapper[4787]: I0127 07:51:59.913195 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/fa44405c-042c-485a-ab6c-912dcd377751-log-socket\") pod \"ovnkube-node-7642m\" (UID: \"fa44405c-042c-485a-ab6c-912dcd377751\") " pod="openshift-ovn-kubernetes/ovnkube-node-7642m" Jan 27 07:51:59 crc kubenswrapper[4787]: I0127 07:51:59.913131 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/fa44405c-042c-485a-ab6c-912dcd377751-log-socket\") pod \"ovnkube-node-7642m\" (UID: \"fa44405c-042c-485a-ab6c-912dcd377751\") " pod="openshift-ovn-kubernetes/ovnkube-node-7642m" Jan 27 07:51:59 crc kubenswrapper[4787]: I0127 07:51:59.913246 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: 
\"kubernetes.io/host-path/fa44405c-042c-485a-ab6c-912dcd377751-run-systemd\") pod \"ovnkube-node-7642m\" (UID: \"fa44405c-042c-485a-ab6c-912dcd377751\") " pod="openshift-ovn-kubernetes/ovnkube-node-7642m" Jan 27 07:51:59 crc kubenswrapper[4787]: I0127 07:51:59.913268 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/fa44405c-042c-485a-ab6c-912dcd377751-run-ovn\") pod \"ovnkube-node-7642m\" (UID: \"fa44405c-042c-485a-ab6c-912dcd377751\") " pod="openshift-ovn-kubernetes/ovnkube-node-7642m" Jan 27 07:51:59 crc kubenswrapper[4787]: I0127 07:51:59.913295 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/fa44405c-042c-485a-ab6c-912dcd377751-ovn-node-metrics-cert\") pod \"ovnkube-node-7642m\" (UID: \"fa44405c-042c-485a-ab6c-912dcd377751\") " pod="openshift-ovn-kubernetes/ovnkube-node-7642m" Jan 27 07:51:59 crc kubenswrapper[4787]: I0127 07:51:59.913363 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nwnwn\" (UniqueName: \"kubernetes.io/projected/0ff88955-7cd9-4af4-9e0e-79614a1d2994-kube-api-access-nwnwn\") pod \"multus-additional-cni-plugins-fqb6r\" (UID: \"0ff88955-7cd9-4af4-9e0e-79614a1d2994\") " pod="openshift-multus/multus-additional-cni-plugins-fqb6r" Jan 27 07:51:59 crc kubenswrapper[4787]: I0127 07:51:59.913424 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/f051e184-acac-47cf-9e04-9df648288715-mcd-auth-proxy-config\") pod \"machine-config-daemon-q4fh5\" (UID: \"f051e184-acac-47cf-9e04-9df648288715\") " pod="openshift-machine-config-operator/machine-config-daemon-q4fh5" Jan 27 07:51:59 crc kubenswrapper[4787]: I0127 07:51:59.913445 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/fa44405c-042c-485a-ab6c-912dcd377751-host-slash\") pod \"ovnkube-node-7642m\" (UID: \"fa44405c-042c-485a-ab6c-912dcd377751\") " pod="openshift-ovn-kubernetes/ovnkube-node-7642m" Jan 27 07:51:59 crc kubenswrapper[4787]: I0127 07:51:59.913466 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/fa44405c-042c-485a-ab6c-912dcd377751-node-log\") pod \"ovnkube-node-7642m\" (UID: \"fa44405c-042c-485a-ab6c-912dcd377751\") " pod="openshift-ovn-kubernetes/ovnkube-node-7642m" Jan 27 07:51:59 crc kubenswrapper[4787]: I0127 07:51:59.913501 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/fa44405c-042c-485a-ab6c-912dcd377751-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-7642m\" (UID: \"fa44405c-042c-485a-ab6c-912dcd377751\") " pod="openshift-ovn-kubernetes/ovnkube-node-7642m" Jan 27 07:51:59 crc kubenswrapper[4787]: I0127 07:51:59.913522 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/0ff88955-7cd9-4af4-9e0e-79614a1d2994-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-fqb6r\" (UID: \"0ff88955-7cd9-4af4-9e0e-79614a1d2994\") " pod="openshift-multus/multus-additional-cni-plugins-fqb6r" Jan 27 07:51:59 crc kubenswrapper[4787]: I0127 07:51:59.913562 4787 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/0ff88955-7cd9-4af4-9e0e-79614a1d2994-os-release\") pod \"multus-additional-cni-plugins-fqb6r\" (UID: \"0ff88955-7cd9-4af4-9e0e-79614a1d2994\") " pod="openshift-multus/multus-additional-cni-plugins-fqb6r" Jan 27 07:51:59 crc kubenswrapper[4787]: I0127 07:51:59.913601 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/fa44405c-042c-485a-ab6c-912dcd377751-var-lib-openvswitch\") pod \"ovnkube-node-7642m\" (UID: \"fa44405c-042c-485a-ab6c-912dcd377751\") " pod="openshift-ovn-kubernetes/ovnkube-node-7642m" Jan 27 07:51:59 crc kubenswrapper[4787]: I0127 07:51:59.913631 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/fa44405c-042c-485a-ab6c-912dcd377751-ovnkube-config\") pod \"ovnkube-node-7642m\" (UID: \"fa44405c-042c-485a-ab6c-912dcd377751\") " pod="openshift-ovn-kubernetes/ovnkube-node-7642m" Jan 27 07:51:59 crc kubenswrapper[4787]: I0127 07:51:59.913663 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/fa44405c-042c-485a-ab6c-912dcd377751-ovnkube-script-lib\") pod \"ovnkube-node-7642m\" (UID: \"fa44405c-042c-485a-ab6c-912dcd377751\") " pod="openshift-ovn-kubernetes/ovnkube-node-7642m" Jan 27 07:51:59 crc kubenswrapper[4787]: I0127 07:51:59.913689 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/fa44405c-042c-485a-ab6c-912dcd377751-host-kubelet\") pod \"ovnkube-node-7642m\" (UID: \"fa44405c-042c-485a-ab6c-912dcd377751\") " pod="openshift-ovn-kubernetes/ovnkube-node-7642m" Jan 27 07:51:59 crc kubenswrapper[4787]: I0127 07:51:59.913716 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/0ff88955-7cd9-4af4-9e0e-79614a1d2994-cni-binary-copy\") pod \"multus-additional-cni-plugins-fqb6r\" (UID: \"0ff88955-7cd9-4af4-9e0e-79614a1d2994\") " pod="openshift-multus/multus-additional-cni-plugins-fqb6r" Jan 27 07:51:59 crc kubenswrapper[4787]: I0127 07:51:59.913764 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/0ff88955-7cd9-4af4-9e0e-79614a1d2994-system-cni-dir\") pod \"multus-additional-cni-plugins-fqb6r\" (UID: \"0ff88955-7cd9-4af4-9e0e-79614a1d2994\") " pod="openshift-multus/multus-additional-cni-plugins-fqb6r" Jan 27 07:51:59 crc kubenswrapper[4787]: I0127 07:51:59.913806 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/fa44405c-042c-485a-ab6c-912dcd377751-host-run-ovn-kubernetes\") pod \"ovnkube-node-7642m\" (UID: \"fa44405c-042c-485a-ab6c-912dcd377751\") " pod="openshift-ovn-kubernetes/ovnkube-node-7642m" Jan 27 07:51:59 crc kubenswrapper[4787]: I0127 07:51:59.913827 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/fa44405c-042c-485a-ab6c-912dcd377751-host-cni-bin\") pod \"ovnkube-node-7642m\" (UID: \"fa44405c-042c-485a-ab6c-912dcd377751\") " pod="openshift-ovn-kubernetes/ovnkube-node-7642m" Jan 27 07:51:59 crc kubenswrapper[4787]: I0127 07:51:59.913850 4787 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/fa44405c-042c-485a-ab6c-912dcd377751-host-cni-netd\") pod \"ovnkube-node-7642m\" (UID: \"fa44405c-042c-485a-ab6c-912dcd377751\") " pod="openshift-ovn-kubernetes/ovnkube-node-7642m" Jan 27 07:51:59 crc kubenswrapper[4787]: I0127 07:51:59.913921 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/fa44405c-042c-485a-ab6c-912dcd377751-host-cni-netd\") pod \"ovnkube-node-7642m\" (UID: \"fa44405c-042c-485a-ab6c-912dcd377751\") " pod="openshift-ovn-kubernetes/ovnkube-node-7642m" Jan 27 07:51:59 crc kubenswrapper[4787]: I0127 07:51:59.913947 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/fa44405c-042c-485a-ab6c-912dcd377751-run-systemd\") pod \"ovnkube-node-7642m\" (UID: \"fa44405c-042c-485a-ab6c-912dcd377751\") " pod="openshift-ovn-kubernetes/ovnkube-node-7642m" Jan 27 07:51:59 crc kubenswrapper[4787]: I0127 07:51:59.913969 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/fa44405c-042c-485a-ab6c-912dcd377751-run-ovn\") pod \"ovnkube-node-7642m\" (UID: \"fa44405c-042c-485a-ab6c-912dcd377751\") " pod="openshift-ovn-kubernetes/ovnkube-node-7642m" Jan 27 07:51:59 crc kubenswrapper[4787]: I0127 07:51:59.914275 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/0ff88955-7cd9-4af4-9e0e-79614a1d2994-tuning-conf-dir\") pod \"multus-additional-cni-plugins-fqb6r\" (UID: \"0ff88955-7cd9-4af4-9e0e-79614a1d2994\") " pod="openshift-multus/multus-additional-cni-plugins-fqb6r" Jan 27 07:51:59 crc kubenswrapper[4787]: I0127 07:51:59.914386 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/fa44405c-042c-485a-ab6c-912dcd377751-var-lib-openvswitch\") pod \"ovnkube-node-7642m\" (UID: \"fa44405c-042c-485a-ab6c-912dcd377751\") " pod="openshift-ovn-kubernetes/ovnkube-node-7642m" Jan 27 07:51:59 crc kubenswrapper[4787]: I0127 07:51:59.914472 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/fa44405c-042c-485a-ab6c-912dcd377751-etc-openvswitch\") pod \"ovnkube-node-7642m\" (UID: \"fa44405c-042c-485a-ab6c-912dcd377751\") " pod="openshift-ovn-kubernetes/ovnkube-node-7642m" Jan 27 07:51:59 crc kubenswrapper[4787]: I0127 07:51:59.914535 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/0ff88955-7cd9-4af4-9e0e-79614a1d2994-system-cni-dir\") pod \"multus-additional-cni-plugins-fqb6r\" (UID: \"0ff88955-7cd9-4af4-9e0e-79614a1d2994\") " pod="openshift-multus/multus-additional-cni-plugins-fqb6r" Jan 27 07:51:59 crc kubenswrapper[4787]: I0127 07:51:59.914503 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/fa44405c-042c-485a-ab6c-912dcd377751-host-run-ovn-kubernetes\") pod \"ovnkube-node-7642m\" (UID: \"fa44405c-042c-485a-ab6c-912dcd377751\") " pod="openshift-ovn-kubernetes/ovnkube-node-7642m" Jan 27 07:51:59 crc kubenswrapper[4787]: I0127 07:51:59.914584 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: 
\"kubernetes.io/host-path/fa44405c-042c-485a-ab6c-912dcd377751-run-openvswitch\") pod \"ovnkube-node-7642m\" (UID: \"fa44405c-042c-485a-ab6c-912dcd377751\") " pod="openshift-ovn-kubernetes/ovnkube-node-7642m" Jan 27 07:51:59 crc kubenswrapper[4787]: I0127 07:51:59.914603 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/fa44405c-042c-485a-ab6c-912dcd377751-host-cni-bin\") pod \"ovnkube-node-7642m\" (UID: \"fa44405c-042c-485a-ab6c-912dcd377751\") " pod="openshift-ovn-kubernetes/ovnkube-node-7642m" Jan 27 07:51:59 crc kubenswrapper[4787]: I0127 07:51:59.914621 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/fa44405c-042c-485a-ab6c-912dcd377751-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-7642m\" (UID: \"fa44405c-042c-485a-ab6c-912dcd377751\") " pod="openshift-ovn-kubernetes/ovnkube-node-7642m" Jan 27 07:51:59 crc kubenswrapper[4787]: I0127 07:51:59.914640 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/fa44405c-042c-485a-ab6c-912dcd377751-host-run-netns\") pod \"ovnkube-node-7642m\" (UID: \"fa44405c-042c-485a-ab6c-912dcd377751\") " pod="openshift-ovn-kubernetes/ovnkube-node-7642m" Jan 27 07:51:59 crc kubenswrapper[4787]: I0127 07:51:59.914665 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/fa44405c-042c-485a-ab6c-912dcd377751-host-slash\") pod \"ovnkube-node-7642m\" (UID: \"fa44405c-042c-485a-ab6c-912dcd377751\") " pod="openshift-ovn-kubernetes/ovnkube-node-7642m" Jan 27 07:51:59 crc kubenswrapper[4787]: I0127 07:51:59.914701 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/fa44405c-042c-485a-ab6c-912dcd377751-node-log\") pod \"ovnkube-node-7642m\" (UID: \"fa44405c-042c-485a-ab6c-912dcd377751\") " pod="openshift-ovn-kubernetes/ovnkube-node-7642m" Jan 27 07:51:59 crc kubenswrapper[4787]: I0127 07:51:59.914722 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/0ff88955-7cd9-4af4-9e0e-79614a1d2994-cnibin\") pod \"multus-additional-cni-plugins-fqb6r\" (UID: \"0ff88955-7cd9-4af4-9e0e-79614a1d2994\") " pod="openshift-multus/multus-additional-cni-plugins-fqb6r" Jan 27 07:51:59 crc kubenswrapper[4787]: I0127 07:51:59.914756 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/0ff88955-7cd9-4af4-9e0e-79614a1d2994-os-release\") pod \"multus-additional-cni-plugins-fqb6r\" (UID: \"0ff88955-7cd9-4af4-9e0e-79614a1d2994\") " pod="openshift-multus/multus-additional-cni-plugins-fqb6r" Jan 27 07:51:59 crc kubenswrapper[4787]: I0127 07:51:59.915206 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/0ff88955-7cd9-4af4-9e0e-79614a1d2994-cni-binary-copy\") pod \"multus-additional-cni-plugins-fqb6r\" (UID: \"0ff88955-7cd9-4af4-9e0e-79614a1d2994\") " pod="openshift-multus/multus-additional-cni-plugins-fqb6r" Jan 27 07:51:59 crc kubenswrapper[4787]: I0127 07:51:59.915274 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/fa44405c-042c-485a-ab6c-912dcd377751-host-kubelet\") pod 
\"ovnkube-node-7642m\" (UID: \"fa44405c-042c-485a-ab6c-912dcd377751\") " pod="openshift-ovn-kubernetes/ovnkube-node-7642m" Jan 27 07:51:59 crc kubenswrapper[4787]: I0127 07:51:59.915475 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/f051e184-acac-47cf-9e04-9df648288715-mcd-auth-proxy-config\") pod \"machine-config-daemon-q4fh5\" (UID: \"f051e184-acac-47cf-9e04-9df648288715\") " pod="openshift-machine-config-operator/machine-config-daemon-q4fh5" Jan 27 07:51:59 crc kubenswrapper[4787]: I0127 07:51:59.915584 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/0ff88955-7cd9-4af4-9e0e-79614a1d2994-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-fqb6r\" (UID: \"0ff88955-7cd9-4af4-9e0e-79614a1d2994\") " pod="openshift-multus/multus-additional-cni-plugins-fqb6r" Jan 27 07:51:59 crc kubenswrapper[4787]: I0127 07:51:59.915587 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/fa44405c-042c-485a-ab6c-912dcd377751-ovnkube-config\") pod \"ovnkube-node-7642m\" (UID: \"fa44405c-042c-485a-ab6c-912dcd377751\") " pod="openshift-ovn-kubernetes/ovnkube-node-7642m" Jan 27 07:51:59 crc kubenswrapper[4787]: I0127 07:51:59.915695 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/fa44405c-042c-485a-ab6c-912dcd377751-ovnkube-script-lib\") pod \"ovnkube-node-7642m\" (UID: \"fa44405c-042c-485a-ab6c-912dcd377751\") " pod="openshift-ovn-kubernetes/ovnkube-node-7642m" Jan 27 07:51:59 crc kubenswrapper[4787]: I0127 07:51:59.915693 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/fa44405c-042c-485a-ab6c-912dcd377751-systemd-units\") pod \"ovnkube-node-7642m\" (UID: \"fa44405c-042c-485a-ab6c-912dcd377751\") " pod="openshift-ovn-kubernetes/ovnkube-node-7642m" Jan 27 07:51:59 crc kubenswrapper[4787]: I0127 07:51:59.915980 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/fa44405c-042c-485a-ab6c-912dcd377751-env-overrides\") pod \"ovnkube-node-7642m\" (UID: \"fa44405c-042c-485a-ab6c-912dcd377751\") " pod="openshift-ovn-kubernetes/ovnkube-node-7642m" Jan 27 07:51:59 crc kubenswrapper[4787]: I0127 07:51:59.920012 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/f051e184-acac-47cf-9e04-9df648288715-proxy-tls\") pod \"machine-config-daemon-q4fh5\" (UID: \"f051e184-acac-47cf-9e04-9df648288715\") " pod="openshift-machine-config-operator/machine-config-daemon-q4fh5" Jan 27 07:51:59 crc kubenswrapper[4787]: I0127 07:51:59.920438 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/fa44405c-042c-485a-ab6c-912dcd377751-ovn-node-metrics-cert\") pod \"ovnkube-node-7642m\" (UID: \"fa44405c-042c-485a-ab6c-912dcd377751\") " pod="openshift-ovn-kubernetes/ovnkube-node-7642m" Jan 27 07:51:59 crc kubenswrapper[4787]: I0127 07:51:59.932192 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7279787f-4f29-4eae-a213-b7ea5d579b93\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://486c1b541ff6adc6a3cdab348fbdd7fbcf1c202c1a474e5e5db37bbcfb38fe06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://47800c35810b19642ee37e41f20900bef93d843d1d7e2219547dde7bf88cc7dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://85684c09dd546fc9a972aa45da092b22d794a5588140a451becfbe962be82b76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0aa5c186d3ea5f41540e7450b9ee5a5bbfe8359
762ab9aa219a4e4bb26fb0a94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9717409dfdeb0d26d9a1c28a651d1feddfd4aaa11a9bc7db7206fbf8c6400c22\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b0fb183cca1d2f6f6f5bd9c80107e85fe0f5f39ea985a036ba115836e45d7cc2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b0fb183cca1d2f6f6f5bd9c80107e85fe0f5f39ea985a036ba115836e45d7cc2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:51:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a9bb746c11b2a67e014815e16d6765a1c86a3d2c156cae83853881bdb988507c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a9bb746c11b2a67e014815e16d6765a1c86a3d2c156cae83853881bdb988507c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://1fa78137ccc23e32f6eb838abf3991ca3d99b8349721f1419c892a7f8795f55f\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1fa78137ccc23e32f6eb838abf3991ca3d99b8349721f1419c892a7f8795f55f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:51:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:35Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:51:59Z is after 2025-08-24T17:21:41Z" Jan 27 07:51:59 crc kubenswrapper[4787]: I0127 07:51:59.935405 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tj4tn\" (UniqueName: \"kubernetes.io/projected/fa44405c-042c-485a-ab6c-912dcd377751-kube-api-access-tj4tn\") pod \"ovnkube-node-7642m\" (UID: \"fa44405c-042c-485a-ab6c-912dcd377751\") " pod="openshift-ovn-kubernetes/ovnkube-node-7642m" Jan 27 07:51:59 crc kubenswrapper[4787]: I0127 07:51:59.936578 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zvg7g\" (UniqueName: \"kubernetes.io/projected/f051e184-acac-47cf-9e04-9df648288715-kube-api-access-zvg7g\") pod \"machine-config-daemon-q4fh5\" (UID: \"f051e184-acac-47cf-9e04-9df648288715\") " pod="openshift-machine-config-operator/machine-config-daemon-q4fh5" Jan 27 07:51:59 crc kubenswrapper[4787]: I0127 07:51:59.938568 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nwnwn\" (UniqueName: \"kubernetes.io/projected/0ff88955-7cd9-4af4-9e0e-79614a1d2994-kube-api-access-nwnwn\") pod \"multus-additional-cni-plugins-fqb6r\" (UID: \"0ff88955-7cd9-4af4-9e0e-79614a1d2994\") " pod="openshift-multus/multus-additional-cni-plugins-fqb6r" Jan 27 07:51:59 crc kubenswrapper[4787]: I0127 07:51:59.949149 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7a18f3e6327858658093c392b8b7de18ec6a1085c08ae31ae2f2dc371555011a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:51:59Z is after 2025-08-24T17:21:41Z" Jan 27 07:51:59 crc kubenswrapper[4787]: I0127 07:51:59.964786 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:51:59Z is after 2025-08-24T17:21:41Z" Jan 27 07:51:59 crc kubenswrapper[4787]: I0127 07:51:59.980074 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:51:59Z is after 2025-08-24T17:21:41Z" Jan 27 07:51:59 crc kubenswrapper[4787]: I0127 07:51:59.994890 4787 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/multus-rqjpz" Jan 27 07:51:59 crc kubenswrapper[4787]: I0127 07:51:59.995225 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-rqjpz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e6f78168-0b0d-464d-b1c7-00bb9a69c0d1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tz7b2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.1
1\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-rqjpz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:51:59Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:00 crc kubenswrapper[4787]: W0127 07:52:00.007626 4787 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode6f78168_0b0d_464d_b1c7_00bb9a69c0d1.slice/crio-9d585e70eef61281d63cb4cbe323a67195579c8a6d3f54ab2b20a452cdba089c WatchSource:0}: Error finding container 9d585e70eef61281d63cb4cbe323a67195579c8a6d3f54ab2b20a452cdba089c: Status 404 returned error can't find the container with id 9d585e70eef61281d63cb4cbe323a67195579c8a6d3f54ab2b20a452cdba089c Jan 27 07:52:00 crc kubenswrapper[4787]: I0127 07:52:00.013951 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-fqb6r" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0ff88955-7cd9-4af4-9e0e-79614a1d2994\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"na
me\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-fqb6r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:00Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:00 crc kubenswrapper[4787]: I0127 07:52:00.019910 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-7642m" Jan 27 07:52:00 crc kubenswrapper[4787]: I0127 07:52:00.025990 4787 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-q4fh5" Jan 27 07:52:00 crc kubenswrapper[4787]: I0127 07:52:00.035959 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-fqb6r" Jan 27 07:52:00 crc kubenswrapper[4787]: I0127 07:52:00.038112 4787 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-29 01:34:16.751943024 +0000 UTC Jan 27 07:52:00 crc kubenswrapper[4787]: I0127 07:52:00.039076 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7642m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fa44405c-042c-485a-ab6c-912dcd377751\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:59Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-7642m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:00Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:00 crc kubenswrapper[4787]: I0127 07:52:00.054747 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2974b5846d4b9e95a3eab849f160c5b33393ff6b1bf1275bc6476ec80807d632\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4426d84c4375c7c659c77049bbd6a720bf004fa5e7780b1e8739a2620c3667f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:00Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:00 crc kubenswrapper[4787]: W0127 07:52:00.055282 4787 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf051e184_acac_47cf_9e04_9df648288715.slice/crio-05d452135124e832e7cd32209052e5d584d66c5a304dc432842415d240790523 WatchSource:0}: Error finding container 05d452135124e832e7cd32209052e5d584d66c5a304dc432842415d240790523: Status 404 returned error can't find the container with id 
05d452135124e832e7cd32209052e5d584d66c5a304dc432842415d240790523 Jan 27 07:52:00 crc kubenswrapper[4787]: W0127 07:52:00.059054 4787 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0ff88955_7cd9_4af4_9e0e_79614a1d2994.slice/crio-896c0966b7257a52016f9deb5a28b1bef853ffadc0f56ff6a3ad934d920d571b WatchSource:0}: Error finding container 896c0966b7257a52016f9deb5a28b1bef853ffadc0f56ff6a3ad934d920d571b: Status 404 returned error can't find the container with id 896c0966b7257a52016f9deb5a28b1bef853ffadc0f56ff6a3ad934d920d571b Jan 27 07:52:00 crc kubenswrapper[4787]: I0127 07:52:00.070181 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1d5a64cdfd5f8aff93294f92aebc9ccf8fa2a4063e347fce2e35604d27651d9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:00Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:00 crc kubenswrapper[4787]: I0127 07:52:00.086123 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-q4fh5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f051e184-acac-47cf-9e04-9df648288715\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zvg7g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zvg7g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:59Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-q4fh5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:00Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:00 crc kubenswrapper[4787]: I0127 07:52:00.254646 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-fqb6r" 
event={"ID":"0ff88955-7cd9-4af4-9e0e-79614a1d2994","Type":"ContainerStarted","Data":"896c0966b7257a52016f9deb5a28b1bef853ffadc0f56ff6a3ad934d920d571b"} Jan 27 07:52:00 crc kubenswrapper[4787]: I0127 07:52:00.258857 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-q4fh5" event={"ID":"f051e184-acac-47cf-9e04-9df648288715","Type":"ContainerStarted","Data":"8829867d627dfed47b988a93a9ac3e381c0bf59794b65f8e1c1e821838477748"} Jan 27 07:52:00 crc kubenswrapper[4787]: I0127 07:52:00.258925 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-q4fh5" event={"ID":"f051e184-acac-47cf-9e04-9df648288715","Type":"ContainerStarted","Data":"05d452135124e832e7cd32209052e5d584d66c5a304dc432842415d240790523"} Jan 27 07:52:00 crc kubenswrapper[4787]: I0127 07:52:00.266073 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-rqjpz" event={"ID":"e6f78168-0b0d-464d-b1c7-00bb9a69c0d1","Type":"ContainerStarted","Data":"e73805a73ffd2c2028fa682daaf20e295ecd2f2d7e24bb9b897883f18c3c514e"} Jan 27 07:52:00 crc kubenswrapper[4787]: I0127 07:52:00.266143 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-rqjpz" event={"ID":"e6f78168-0b0d-464d-b1c7-00bb9a69c0d1","Type":"ContainerStarted","Data":"9d585e70eef61281d63cb4cbe323a67195579c8a6d3f54ab2b20a452cdba089c"} Jan 27 07:52:00 crc kubenswrapper[4787]: I0127 07:52:00.267589 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-fghc7" event={"ID":"6cac1bb0-6b6f-442f-9db4-a25d4f194bb1","Type":"ContainerStarted","Data":"825d098d784c9e43aa1c5b9014dd290f1a23ddf0651dfd603c634682a8f05da2"} Jan 27 07:52:00 crc kubenswrapper[4787]: I0127 07:52:00.267653 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-fghc7" event={"ID":"6cac1bb0-6b6f-442f-9db4-a25d4f194bb1","Type":"ContainerStarted","Data":"f8ec228b302dbe560c3ebd7e776e3f9531d5a90d8d9d34aaddfe44434b94f91a"} Jan 27 07:52:00 crc kubenswrapper[4787]: I0127 07:52:00.268906 4787 generic.go:334] "Generic (PLEG): container finished" podID="fa44405c-042c-485a-ab6c-912dcd377751" containerID="199ad62dee1b7ba9f8af9b2f3fcd77db94e8ba7dbfe2d011610c47358c17126b" exitCode=0 Jan 27 07:52:00 crc kubenswrapper[4787]: I0127 07:52:00.268996 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7642m" event={"ID":"fa44405c-042c-485a-ab6c-912dcd377751","Type":"ContainerDied","Data":"199ad62dee1b7ba9f8af9b2f3fcd77db94e8ba7dbfe2d011610c47358c17126b"} Jan 27 07:52:00 crc kubenswrapper[4787]: I0127 07:52:00.269066 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7642m" event={"ID":"fa44405c-042c-485a-ab6c-912dcd377751","Type":"ContainerStarted","Data":"40ba2420aaaf3b3cc8595bbc5c0c66623c4fd6910892664a858be15544316d35"} Jan 27 07:52:00 crc kubenswrapper[4787]: I0127 07:52:00.299517 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9bef4a57-904d-47b1-baa1-224c5c9f2b1a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://99c0fb5499a4bed619d003da60f83076e413fc5bda47a12d42770f567deeeec8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c114c2274358e777b240765cca81ea54a2e334002317a86595b7bfec95a11fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c470d3edd878f69c48f1457780557dd56300c0fe69342c479f1922dca856bceb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bba5d04b33d62090867842211184f53bc23cf78b90a4c6709792639b74d1cf0d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:35Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:00Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:00 crc kubenswrapper[4787]: I0127 07:52:00.330872 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"60eef58e-b2eb-43d9-a499-317083a89ca3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://35e90c0899d9ae9ea7cce3f5904955f1f3a80e147efbd766e9ed3b34014f40df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://53dd18ffb0f39abccd8edc121977448f4c491f6be9acee866dde8cbeb7c52ab5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://af5452f3980a8a4614721cb60fffffb3b1f3f5d8d82928249857ed2dd3fb5986\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://879d855210b326ad3cbb964024c0b48b7373519deae8b974b9f5864416c15ef3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://879d855210b326ad3cbb964024c0b48b7373519deae8b974b9f5864416c15ef3\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"message\\\":\\\"le observer\\\\nW0127 07:51:54.984485 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0127 07:51:54.984680 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0127 07:51:54.985504 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2155430209/tls.crt::/tmp/serving-cert-2155430209/tls.key\\\\\\\"\\\\nI0127 07:51:55.207010 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0127 07:51:55.214027 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0127 07:51:55.214056 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0127 07:51:55.214085 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0127 07:51:55.214092 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0127 07:51:55.221404 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0127 07:51:55.221608 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0127 07:51:55.221697 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0127 07:51:55.221768 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0127 07:51:55.221825 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0127 07:51:55.221877 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0127 07:51:55.221926 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0127 07:51:55.221414 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0127 07:51:55.222961 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-27T07:51:49Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f3424e1f77a68fac6a8ccd12e018ce28850817c99e72cef7edbff0941124c46b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c37afd8d2562157729aca3685d7abae2bb6b9e081c2b456706dde875fd762c07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c37afd8d2562157729aca3685d7abae2bb6b9e081c2b456706dde875fd762c07\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:51:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:35Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:00Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:00 crc kubenswrapper[4787]: I0127 07:52:00.348267 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:00Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:00 crc kubenswrapper[4787]: I0127 07:52:00.361089 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-fghc7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6cac1bb0-6b6f-442f-9db4-a25d4f194bb1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"message\\\":\\\"containers with unready status: 
[dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c8zvj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:59Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-fghc7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:00Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:00 crc kubenswrapper[4787]: I0127 07:52:00.382210 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7279787f-4f29-4eae-a213-b7ea5d579b93\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://486c1b541ff6adc6a3cdab348fbdd7fbcf1c202c1a474e5e5db37bbcfb38fe06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://47800c35810b19642ee37e41f20900bef93d843d1d7e2219547dde7bf88cc7dd\\\",\\\"image\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://85684c09dd546fc9a972aa45da092b22d794a5588140a451becfbe962be82b76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0aa5c186d3ea5f41540e7450b9ee5a5bbfe8359762ab9aa219a4e4bb26fb0a94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9717409dfdeb0d26d9a1c28a651d1feddfd4aaa11a9bc7db7206fbf8c6400c22\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b0fb183cca1d2f6f6f5bd9c80107e85fe0f5f39ea985a036ba115836e45d7cc2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd
6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b0fb183cca1d2f6f6f5bd9c80107e85fe0f5f39ea985a036ba115836e45d7cc2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:51:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a9bb746c11b2a67e014815e16d6765a1c86a3d2c156cae83853881bdb988507c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a9bb746c11b2a67e014815e16d6765a1c86a3d2c156cae83853881bdb988507c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://1fa78137ccc23e32f6eb838abf3991ca3d99b8349721f1419c892a7f8795f55f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1fa78137ccc23e32f6eb838abf3991ca3d99b8349721f1419c892a7f8795f55f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:51:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:35Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:00Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:00 crc kubenswrapper[4787]: I0127 07:52:00.397000 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7a18f3e6327858658093c392b8b7de18ec6a1085c08ae31ae2f2dc371555011a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:00Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:00 crc kubenswrapper[4787]: I0127 07:52:00.410368 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:00Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:00 crc kubenswrapper[4787]: I0127 07:52:00.425375 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:00Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:00 crc kubenswrapper[4787]: I0127 07:52:00.438423 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-rqjpz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e6f78168-0b0d-464d-b1c7-00bb9a69c0d1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e73805a73ffd2c2028fa682daaf20e295ecd2f2d7e24bb9b897883f18c3c514e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mo
untPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tz7b2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-rqjpz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:00Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:00 crc kubenswrapper[4787]: I0127 07:52:00.455699 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-fqb6r" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0ff88955-7cd9-4af4-9e0e-79614a1d2994\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"na
me\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-fqb6r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:00Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:00 crc kubenswrapper[4787]: I0127 07:52:00.478399 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7642m" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fa44405c-042c-485a-ab6c-912dcd377751\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\
\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art
-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\
\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:59Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-7642m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:00Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:00 crc kubenswrapper[4787]: I0127 07:52:00.495063 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2974b5846d4b9e95a3eab849f160c5b33393ff6b1bf1275bc6476ec80807d632\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4426d84c4375c7c659c77049bbd6a720bf004fa5e7780b1e8739a2620c3667f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"en
v-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:00Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:00 crc kubenswrapper[4787]: I0127 07:52:00.507231 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1d5a64cdfd5f8aff93294f92aebc9ccf8fa2a4063e347fce2e35604d27651d9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:00Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:00 crc kubenswrapper[4787]: I0127 07:52:00.518812 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-q4fh5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f051e184-acac-47cf-9e04-9df648288715\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zvg7g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zvg7g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:59Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-q4fh5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:00Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:00 crc kubenswrapper[4787]: I0127 07:52:00.532701 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-q4fh5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f051e184-acac-47cf-9e04-9df648288715\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zvg7g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zvg7g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:59Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-q4fh5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:00Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:00 crc kubenswrapper[4787]: I0127 07:52:00.547822 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2974b5846d4b9e95a3eab849f160c5b33393ff6b1bf1275bc6476ec80807d632\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4426d84c4375c7c659c77049bbd6a720bf004fa5e7780b1e8739a2620c3667f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:00Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:00 crc kubenswrapper[4787]: I0127 07:52:00.560902 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1d5a64cdfd5f8aff93294f92aebc9ccf8fa2a4063e347fce2e35604d27651d9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:00Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:00 crc kubenswrapper[4787]: I0127 07:52:00.573992 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-fghc7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6cac1bb0-6b6f-442f-9db4-a25d4f194bb1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://825d098d784c9e43aa1c5b9014dd290f1a23ddf0651dfd603c634682a8f05da2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c8zvj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:59Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-fghc7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:00Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:00 crc kubenswrapper[4787]: I0127 07:52:00.588856 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9bef4a57-904d-47b1-baa1-224c5c9f2b1a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://99c0fb5499a4bed619d003da60f83076e413fc5bda47a12d42770f567deeeec8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c114c2274358e777b240765cca81ea54a2e334002317a86595b7bfec95a11fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c470d3edd878f69c48f1457780557dd56300c0fe69342c479f1922dca856bceb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bba5d04b33d62090867842211184f53bc23cf78b90a4c6709792639b74d1cf0d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:35Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:00Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:00 crc kubenswrapper[4787]: I0127 07:52:00.603142 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"60eef58e-b2eb-43d9-a499-317083a89ca3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://35e90c0899d9ae9ea7cce3f5904955f1f3a80e147efbd766e9ed3b34014f40df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://53dd18ffb0f39abccd8edc121977448f4c491f6be9acee866dde8cbeb7c52ab5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://af5452f3980a8a4614721cb60fffffb3b1f3f5d8d82928249857ed2dd3fb5986\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://879d855210b326ad3cbb964024c0b48b7373519deae8b974b9f5864416c15ef3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://879d855210b326ad3cbb964024c0b48b7373519deae8b974b9f5864416c15ef3\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"message\\\":\\\"le observer\\\\nW0127 07:51:54.984485 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0127 07:51:54.984680 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0127 07:51:54.985504 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2155430209/tls.crt::/tmp/serving-cert-2155430209/tls.key\\\\\\\"\\\\nI0127 07:51:55.207010 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0127 07:51:55.214027 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0127 07:51:55.214056 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0127 07:51:55.214085 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0127 07:51:55.214092 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0127 07:51:55.221404 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0127 07:51:55.221608 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0127 07:51:55.221697 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0127 07:51:55.221768 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0127 07:51:55.221825 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0127 07:51:55.221877 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0127 07:51:55.221926 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0127 07:51:55.221414 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0127 07:51:55.222961 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-27T07:51:49Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f3424e1f77a68fac6a8ccd12e018ce28850817c99e72cef7edbff0941124c46b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c37afd8d2562157729aca3685d7abae2bb6b9e081c2b456706dde875fd762c07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c37afd8d2562157729aca3685d7abae2bb6b9e081c2b456706dde875fd762c07\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:51:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:35Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:00Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:00 crc kubenswrapper[4787]: I0127 07:52:00.622956 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:00Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:00 crc kubenswrapper[4787]: I0127 07:52:00.637539 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:00Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:00 crc kubenswrapper[4787]: I0127 07:52:00.651795 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-rqjpz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e6f78168-0b0d-464d-b1c7-00bb9a69c0d1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e73805a73ffd2c2028fa682daaf20e295ecd2f2d7e24bb9b897883f18c3c514e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mo
untPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tz7b2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-rqjpz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:00Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:00 crc kubenswrapper[4787]: I0127 07:52:00.664758 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-fqb6r" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0ff88955-7cd9-4af4-9e0e-79614a1d2994\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"na
me\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-fqb6r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:00Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:00 crc kubenswrapper[4787]: I0127 07:52:00.688268 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7279787f-4f29-4eae-a213-b7ea5d579b93\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://486c1b541ff6adc6a3cdab348fbdd7fbcf1c202c1a474e5e5db37bbcfb38fe06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://47800c35810b19642ee37e41f20900bef93d843d1d7e2219547dde7bf88cc7dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://85684c09dd546fc9a972aa45da092b22d794a5588140a451becfbe962be82b76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0aa5c186d3ea5f41540e7450b9ee5a5bbfe8359
762ab9aa219a4e4bb26fb0a94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9717409dfdeb0d26d9a1c28a651d1feddfd4aaa11a9bc7db7206fbf8c6400c22\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b0fb183cca1d2f6f6f5bd9c80107e85fe0f5f39ea985a036ba115836e45d7cc2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b0fb183cca1d2f6f6f5bd9c80107e85fe0f5f39ea985a036ba115836e45d7cc2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:51:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a9bb746c11b2a67e014815e16d6765a1c86a3d2c156cae83853881bdb988507c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a9bb746c11b2a67e014815e16d6765a1c86a3d2c156cae83853881bdb988507c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://1fa78137ccc23e32f6eb838abf3991ca3d99b8349721f1419c892a7f8795f55f\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1fa78137ccc23e32f6eb838abf3991ca3d99b8349721f1419c892a7f8795f55f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:51:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:35Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:00Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:00 crc kubenswrapper[4787]: I0127 07:52:00.701046 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7a18f3e6327858658093c392b8b7de18ec6a1085c08ae31ae2f2dc371555011a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:00Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:00 crc kubenswrapper[4787]: I0127 07:52:00.713210 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:00Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:00 crc kubenswrapper[4787]: I0127 07:52:00.733785 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7642m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fa44405c-042c-485a-ab6c-912dcd377751\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"message\\\":\\\"containers with incomplete status: 
[kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d7732574532
65a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"rea
dOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:59Z\\\"}}\" for pod 
\"openshift-ovn-kubernetes\"/\"ovnkube-node-7642m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:00Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:00 crc kubenswrapper[4787]: I0127 07:52:00.932886 4787 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 27 07:52:00 crc kubenswrapper[4787]: I0127 07:52:00.934751 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:00 crc kubenswrapper[4787]: I0127 07:52:00.934794 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:00 crc kubenswrapper[4787]: I0127 07:52:00.934805 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:00 crc kubenswrapper[4787]: I0127 07:52:00.934913 4787 kubelet_node_status.go:76] "Attempting to register node" node="crc" Jan 27 07:52:00 crc kubenswrapper[4787]: I0127 07:52:00.943967 4787 kubelet_node_status.go:115] "Node was previously registered" node="crc" Jan 27 07:52:00 crc kubenswrapper[4787]: I0127 07:52:00.944374 4787 kubelet_node_status.go:79] "Successfully registered node" node="crc" Jan 27 07:52:00 crc kubenswrapper[4787]: I0127 07:52:00.945705 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:00 crc kubenswrapper[4787]: I0127 07:52:00.945746 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:00 crc kubenswrapper[4787]: I0127 07:52:00.945757 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:00 crc kubenswrapper[4787]: I0127 07:52:00.945775 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:00 crc kubenswrapper[4787]: I0127 07:52:00.945789 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:00Z","lastTransitionTime":"2026-01-27T07:52:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:52:00 crc kubenswrapper[4787]: E0127 07:52:00.961741 4787 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"55ded10c-ad4c-443d-a571-c7a8f4a50b03\\\",\\\"systemUUID\\\":\\\"553a2493-323d-43ed-bfcf-44c5a8746c19\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:00Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:00 crc kubenswrapper[4787]: I0127 07:52:00.965090 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:00 crc kubenswrapper[4787]: I0127 07:52:00.965139 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 27 07:52:00 crc kubenswrapper[4787]: I0127 07:52:00.965148 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:00 crc kubenswrapper[4787]: I0127 07:52:00.965165 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:00 crc kubenswrapper[4787]: I0127 07:52:00.965175 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:00Z","lastTransitionTime":"2026-01-27T07:52:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:00 crc kubenswrapper[4787]: E0127 07:52:00.978151 4787 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"55ded10c-ad4c-443d-a571-c7a8f4a50b03\\\",\\\"systemUUID\\\":\\\"553a2493-323d-43ed-bfcf-44c5a8746c19\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:00Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:00 crc kubenswrapper[4787]: I0127 07:52:00.982629 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:00 crc kubenswrapper[4787]: I0127 07:52:00.982810 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 27 07:52:00 crc kubenswrapper[4787]: I0127 07:52:00.982935 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:00 crc kubenswrapper[4787]: I0127 07:52:00.983038 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:00 crc kubenswrapper[4787]: I0127 07:52:00.983128 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:00Z","lastTransitionTime":"2026-01-27T07:52:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:00 crc kubenswrapper[4787]: E0127 07:52:00.997642 4787 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"55ded10c-ad4c-443d-a571-c7a8f4a50b03\\\",\\\"systemUUID\\\":\\\"553a2493-323d-43ed-bfcf-44c5a8746c19\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:00Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:01 crc kubenswrapper[4787]: I0127 07:52:01.001347 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:01 crc kubenswrapper[4787]: I0127 07:52:01.001416 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 27 07:52:01 crc kubenswrapper[4787]: I0127 07:52:01.001430 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:01 crc kubenswrapper[4787]: I0127 07:52:01.001451 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:01 crc kubenswrapper[4787]: I0127 07:52:01.001464 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:01Z","lastTransitionTime":"2026-01-27T07:52:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:01 crc kubenswrapper[4787]: E0127 07:52:01.017329 4787 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T07:52:01Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:01Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T07:52:01Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:01Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T07:52:01Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:01Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T07:52:01Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:01Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"55ded10c-ad4c-443d-a571-c7a8f4a50b03\\\",\\\"systemUUID\\\":\\\"553a2493-323d-43ed-bfcf-44c5a8746c19\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:01Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:01 crc kubenswrapper[4787]: I0127 07:52:01.021964 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:01 crc kubenswrapper[4787]: I0127 07:52:01.022158 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 27 07:52:01 crc kubenswrapper[4787]: I0127 07:52:01.022253 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:01 crc kubenswrapper[4787]: I0127 07:52:01.022337 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:01 crc kubenswrapper[4787]: I0127 07:52:01.022413 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:01Z","lastTransitionTime":"2026-01-27T07:52:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:01 crc kubenswrapper[4787]: E0127 07:52:01.034753 4787 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T07:52:01Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:01Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T07:52:01Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:01Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T07:52:01Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:01Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T07:52:01Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:01Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"55ded10c-ad4c-443d-a571-c7a8f4a50b03\\\",\\\"systemUUID\\\":\\\"553a2493-323d-43ed-bfcf-44c5a8746c19\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:01Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:01 crc kubenswrapper[4787]: E0127 07:52:01.035179 4787 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Jan 27 07:52:01 crc kubenswrapper[4787]: I0127 07:52:01.037231 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Jan 27 07:52:01 crc kubenswrapper[4787]: I0127 07:52:01.037303 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:01 crc kubenswrapper[4787]: I0127 07:52:01.037319 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:01 crc kubenswrapper[4787]: I0127 07:52:01.037346 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:01 crc kubenswrapper[4787]: I0127 07:52:01.037361 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:01Z","lastTransitionTime":"2026-01-27T07:52:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:01 crc kubenswrapper[4787]: I0127 07:52:01.039221 4787 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-01 17:32:43.107697076 +0000 UTC Jan 27 07:52:01 crc kubenswrapper[4787]: I0127 07:52:01.075951 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 27 07:52:01 crc kubenswrapper[4787]: I0127 07:52:01.075983 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 27 07:52:01 crc kubenswrapper[4787]: I0127 07:52:01.076054 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 27 07:52:01 crc kubenswrapper[4787]: E0127 07:52:01.076111 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 27 07:52:01 crc kubenswrapper[4787]: E0127 07:52:01.076235 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 27 07:52:01 crc kubenswrapper[4787]: E0127 07:52:01.076340 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 27 07:52:01 crc kubenswrapper[4787]: I0127 07:52:01.140791 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:01 crc kubenswrapper[4787]: I0127 07:52:01.140830 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:01 crc kubenswrapper[4787]: I0127 07:52:01.140839 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:01 crc kubenswrapper[4787]: I0127 07:52:01.140856 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:01 crc kubenswrapper[4787]: I0127 07:52:01.140868 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:01Z","lastTransitionTime":"2026-01-27T07:52:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:01 crc kubenswrapper[4787]: I0127 07:52:01.243660 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:01 crc kubenswrapper[4787]: I0127 07:52:01.243700 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:01 crc kubenswrapper[4787]: I0127 07:52:01.243709 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:01 crc kubenswrapper[4787]: I0127 07:52:01.243727 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:01 crc kubenswrapper[4787]: I0127 07:52:01.243736 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:01Z","lastTransitionTime":"2026-01-27T07:52:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:52:01 crc kubenswrapper[4787]: I0127 07:52:01.273908 4787 generic.go:334] "Generic (PLEG): container finished" podID="0ff88955-7cd9-4af4-9e0e-79614a1d2994" containerID="5e7833b50fe96981aa0ed63aa03b7078def1c809e60092a901a8b7ebaee78ad8" exitCode=0 Jan 27 07:52:01 crc kubenswrapper[4787]: I0127 07:52:01.274017 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-fqb6r" event={"ID":"0ff88955-7cd9-4af4-9e0e-79614a1d2994","Type":"ContainerDied","Data":"5e7833b50fe96981aa0ed63aa03b7078def1c809e60092a901a8b7ebaee78ad8"} Jan 27 07:52:01 crc kubenswrapper[4787]: I0127 07:52:01.276169 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-q4fh5" event={"ID":"f051e184-acac-47cf-9e04-9df648288715","Type":"ContainerStarted","Data":"cf36b99a3389bdbbca8142539870671f6be61df22503df198f6255937e619950"} Jan 27 07:52:01 crc kubenswrapper[4787]: I0127 07:52:01.291366 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2974b5846d4b9e95a3eab849f160c5b33393ff6b1bf1275bc6476ec80807d632\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4426d84c4375c7c659c77049bbd6a720bf004fa5e7780b1e8739a2620c3667f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-over
rides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:01Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:01 crc kubenswrapper[4787]: I0127 07:52:01.308542 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1d5a64cdfd5f8aff93294f92aebc9ccf8fa2a4063e347fce2e35604d27651d9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:01Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:01 crc kubenswrapper[4787]: I0127 07:52:01.334069 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-q4fh5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f051e184-acac-47cf-9e04-9df648288715\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zvg7g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zvg7g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:59Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-q4fh5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:01Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:01 crc kubenswrapper[4787]: I0127 07:52:01.351030 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9bef4a57-904d-47b1-baa1-224c5c9f2b1a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://99c0fb5499a4bed619d003da60f83076e413fc5bda47a12d42770f567deeeec8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c114c2274358e777b240765cca81ea54a2e334002317a86595b7bfec95a11fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c470d3edd878f69c48f1457780557dd56300c0fe69342c479f1922dca856bceb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bba5d04b33d62090867842211184f53bc23cf78b90a4c6709792639b74d1cf0d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:35Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:01Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:01 crc kubenswrapper[4787]: I0127 07:52:01.351350 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:01 crc kubenswrapper[4787]: I0127 07:52:01.351373 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:01 crc kubenswrapper[4787]: I0127 07:52:01.351383 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:01 crc kubenswrapper[4787]: I0127 07:52:01.351399 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:01 crc kubenswrapper[4787]: I0127 07:52:01.351409 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:01Z","lastTransitionTime":"2026-01-27T07:52:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:52:01 crc kubenswrapper[4787]: I0127 07:52:01.351933 4787 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 27 07:52:01 crc kubenswrapper[4787]: I0127 07:52:01.352807 4787 scope.go:117] "RemoveContainer" containerID="879d855210b326ad3cbb964024c0b48b7373519deae8b974b9f5864416c15ef3" Jan 27 07:52:01 crc kubenswrapper[4787]: E0127 07:52:01.352965 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-apiserver-check-endpoints\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\"" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" Jan 27 07:52:01 crc kubenswrapper[4787]: I0127 07:52:01.364947 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"60eef58e-b2eb-43d9-a499-317083a89ca3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://35e90c0899d9ae9ea7cce3f5904955f1f3a80e147efbd766e9ed3b34014f40df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://53dd18ffb0f39abccd8edc121977448f4c491f6be9acee866dde8cbeb7c52ab5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://af5452f3980a8a4614721cb60fffffb3b1f3f5d8d82928249857ed2dd3fb5986\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://879d855210b326ad3cbb964024c0b48b7373519deae8b974b9f5864416c15ef3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://879d855210b326ad3cbb964024c0b48b7373519deae8b974b9f5864416c15ef3\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"message\\\":\\\"le observer\\\\nW0127 07:51:54.984485 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0127 07:51:54.984680 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0127 07:51:54.985504 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2155430209/tls.crt::/tmp/serving-cert-2155430209/tls.key\\\\\\\"\\\\nI0127 07:51:55.207010 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0127 07:51:55.214027 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0127 07:51:55.214056 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0127 07:51:55.214085 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0127 07:51:55.214092 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0127 07:51:55.221404 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0127 07:51:55.221608 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0127 07:51:55.221697 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0127 07:51:55.221768 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0127 07:51:55.221825 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0127 07:51:55.221877 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0127 07:51:55.221926 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0127 07:51:55.221414 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0127 07:51:55.222961 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-27T07:51:49Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f3424e1f77a68fac6a8ccd12e018ce28850817c99e72cef7edbff0941124c46b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c37afd8d2562157729aca3685d7abae2bb6b9e081c2b456706dde875fd762c07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c37afd8d2562157729aca3685d7abae2bb6b9e081c2b456706dde875fd762c07\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:51:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:35Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:01Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:01 crc kubenswrapper[4787]: I0127 07:52:01.382620 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:01Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:01 crc kubenswrapper[4787]: I0127 07:52:01.396707 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-fghc7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6cac1bb0-6b6f-442f-9db4-a25d4f194bb1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://825d098d784c9e43aa1c5b9014dd290f1a23ddf0651dfd603c634682a8f05da2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c8zvj\\\",\\\"readOnly\\\":true,\\\"recu
rsiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:59Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-fghc7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:01Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:01 crc kubenswrapper[4787]: I0127 07:52:01.411671 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-rqjpz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e6f78168-0b0d-464d-b1c7-00bb9a69c0d1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e73805a73ffd2c2028fa682daaf20e295ecd2f2d7e24bb9b897883f18c3c514e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.
d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tz7b2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-rqjpz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:01Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:01 crc kubenswrapper[4787]: I0127 07:52:01.427177 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-fqb6r" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0ff88955-7cd9-4af4-9e0e-79614a1d2994\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5e7833b50fe96981aa0ed63aa03b7078def1c809e60092a901a8b7ebaee78ad8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5e7833b50fe96981aa0ed63aa03b7078def1c809e60092a901a8b7ebaee78ad8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:52:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reaso
n\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-fqb6r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:01Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:01 crc 
kubenswrapper[4787]: I0127 07:52:01.449667 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7279787f-4f29-4eae-a213-b7ea5d579b93\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://486c1b541ff6adc6a3cdab348fbdd7fbcf1c202c1a474e5e5db37bbcfb38fe06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://47800c35810b19642ee37e41f20900bef93d843d1d7e2219547dde7bf88cc7dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://85684c09dd546fc9a972aa45da092b22d794a5588140a451becfbe962be82b76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\
\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0aa5c186d3ea5f41540e7450b9ee5a5bbfe8359762ab9aa219a4e4bb26fb0a94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9717409dfdeb0d26d9a1c28a651d1feddfd4aaa11a9bc7db7206fbf8c6400c22\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b0fb183cca1d2f6f6f5bd9c80107e85fe0f5f39ea985a036ba115836e45d7cc2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b0fb183cca1d2f6f6f5bd9c80107e85fe0f5f39ea985a036ba115836e45d7cc2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:51:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a9bb746c11b2a67e014815e16d6765a1c86a3d2c156cae83853881bdb988507c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a9bb746c11b2a67e014815e16d6765a1c86a3d2c156cae83853881bdb988507c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"reason\\\":\\\"Co
mpleted\\\",\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://1fa78137ccc23e32f6eb838abf3991ca3d99b8349721f1419c892a7f8795f55f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1fa78137ccc23e32f6eb838abf3991ca3d99b8349721f1419c892a7f8795f55f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:51:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:35Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:01Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:01 crc kubenswrapper[4787]: I0127 07:52:01.454696 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:01 crc kubenswrapper[4787]: I0127 07:52:01.454742 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:01 crc kubenswrapper[4787]: I0127 07:52:01.454755 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:01 crc kubenswrapper[4787]: I0127 07:52:01.454779 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:01 crc kubenswrapper[4787]: I0127 07:52:01.454797 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:01Z","lastTransitionTime":"2026-01-27T07:52:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:52:01 crc kubenswrapper[4787]: I0127 07:52:01.463422 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7a18f3e6327858658093c392b8b7de18ec6a1085c08ae31ae2f2dc371555011a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:01Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:01 crc kubenswrapper[4787]: I0127 07:52:01.476036 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:01Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:01 crc kubenswrapper[4787]: I0127 07:52:01.490731 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:01Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:01 crc kubenswrapper[4787]: I0127 07:52:01.512460 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7642m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fa44405c-042c-485a-ab6c-912dcd377751\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:59Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-7642m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:01Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:01 crc kubenswrapper[4787]: I0127 07:52:01.535571 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7279787f-4f29-4eae-a213-b7ea5d579b93\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://486c1b541ff6adc6a3cdab348fbdd7fbcf1c202c1a474e5e5db37bbcfb38fe06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://47800c35810b19642ee37e41f20900bef93d843d1d7e2219547dde7bf88cc7dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://85684c09dd546fc9a972aa45da092b22d794a5588140a451becfbe962be82b76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0aa5c186d3ea5f41540e7450b9ee5a5bbfe8359
762ab9aa219a4e4bb26fb0a94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9717409dfdeb0d26d9a1c28a651d1feddfd4aaa11a9bc7db7206fbf8c6400c22\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b0fb183cca1d2f6f6f5bd9c80107e85fe0f5f39ea985a036ba115836e45d7cc2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b0fb183cca1d2f6f6f5bd9c80107e85fe0f5f39ea985a036ba115836e45d7cc2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:51:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a9bb746c11b2a67e014815e16d6765a1c86a3d2c156cae83853881bdb988507c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a9bb746c11b2a67e014815e16d6765a1c86a3d2c156cae83853881bdb988507c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://1fa78137ccc23e32f6eb838abf3991ca3d99b8349721f1419c892a7f8795f55f\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1fa78137ccc23e32f6eb838abf3991ca3d99b8349721f1419c892a7f8795f55f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:51:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:35Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:01Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:01 crc kubenswrapper[4787]: I0127 07:52:01.550031 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7a18f3e6327858658093c392b8b7de18ec6a1085c08ae31ae2f2dc371555011a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:01Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:01 crc kubenswrapper[4787]: I0127 07:52:01.557873 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:01 crc kubenswrapper[4787]: I0127 07:52:01.557914 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:01 crc kubenswrapper[4787]: I0127 07:52:01.557927 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:01 crc kubenswrapper[4787]: I0127 07:52:01.557948 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:01 crc kubenswrapper[4787]: I0127 07:52:01.557959 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:01Z","lastTransitionTime":"2026-01-27T07:52:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:01 crc kubenswrapper[4787]: I0127 07:52:01.563183 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:01Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:01 crc kubenswrapper[4787]: I0127 07:52:01.579954 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:01Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:01 crc kubenswrapper[4787]: I0127 07:52:01.596699 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-rqjpz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e6f78168-0b0d-464d-b1c7-00bb9a69c0d1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e73805a73ffd2c2028fa682daaf20e295ecd2f2d7e24bb9b897883f18c3c514e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mo
untPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tz7b2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-rqjpz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:01Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:01 crc kubenswrapper[4787]: I0127 07:52:01.615249 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-fqb6r" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0ff88955-7cd9-4af4-9e0e-79614a1d2994\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5e7833b50fe96981aa0ed63aa03b7078def1c809e60092a901a8b7ebaee78ad8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5e7833b50fe96981aa0ed63aa03b7078def1c809e60092a901a8b7ebaee78ad8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:52:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reaso
n\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-fqb6r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:01Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:01 crc 
kubenswrapper[4787]: I0127 07:52:01.644631 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7642m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fa44405c-042c-485a-ab6c-912dcd377751\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"Po
dInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\
\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://199ad62dee1b7ba9f8af9b2f3fcd77db94e8ba7dbfe2d011610c47358c17126b\\\",\\\"image\\\":\\\"quay.io/openshift-re
lease-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://199ad62dee1b7ba9f8af9b2f3fcd77db94e8ba7dbfe2d011610c47358c17126b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:52:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:59Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-7642m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:01Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:01 crc kubenswrapper[4787]: I0127 07:52:01.659962 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2974b5846d4b9e95a3eab849f160c5b33393ff6b1bf1275bc6476ec80807d632\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4426d84c4375c7c659c77049bbd6a720bf004fa5e7780b1e8739a2620c3667f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\
"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:01Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:01 crc kubenswrapper[4787]: I0127 07:52:01.662573 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:01 crc kubenswrapper[4787]: I0127 07:52:01.662621 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:01 crc kubenswrapper[4787]: I0127 07:52:01.662630 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:01 crc kubenswrapper[4787]: I0127 07:52:01.662644 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:01 crc kubenswrapper[4787]: I0127 07:52:01.662654 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:01Z","lastTransitionTime":"2026-01-27T07:52:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:52:01 crc kubenswrapper[4787]: I0127 07:52:01.674326 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1d5a64cdfd5f8aff93294f92aebc9ccf8fa2a4063e347fce2e35604d27651d9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:01Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:01 crc kubenswrapper[4787]: I0127 07:52:01.690136 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-q4fh5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f051e184-acac-47cf-9e04-9df648288715\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cf36b99a3389bdbbca8142539870671f6be61df22503df198f6255937e619950\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zvg7g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8829867d627dfed47b988a93a9ac3e381c0bf59794b65f8e1c1e821838477748\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zvg7g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:59Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-q4fh5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:01Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:01 crc kubenswrapper[4787]: I0127 07:52:01.703630 4787 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9bef4a57-904d-47b1-baa1-224c5c9f2b1a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://99c0fb5499a4bed619d003da60f83076e413fc5bda47a12d42770f567deeeec8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c114c2274358e777b240765cca81ea54a2e334002317a86595b7bfec95a11fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c470d3edd878f69c48f1457780557dd56300c0fe69342c479f1922dca856bceb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bba5d04b33d6209086784221118
4f53bc23cf78b90a4c6709792639b74d1cf0d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:35Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:01Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:01 crc kubenswrapper[4787]: I0127 07:52:01.720189 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"60eef58e-b2eb-43d9-a499-317083a89ca3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://35e90c0899d9ae9ea7cce3f5904955f1f3a80e147efbd766e9ed3b34014f40df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://53dd18ffb0f39abccd8edc121977448f4c491f6be9acee866dde8cbeb7c52ab5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://af5452f3980a8a4614721cb60fffffb3b1f3f5d8d82928249857ed2dd3fb5986\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://879d855210b326ad3cbb964024c0b48b7373519deae8b974b9f5864416c15ef3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://879d855210b326ad3cbb964024c0b48b7373519deae8b974b9f5864416c15ef3\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"message\\\":\\\"le observer\\\\nW0127 07:51:54.984485 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0127 07:51:54.984680 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0127 07:51:54.985504 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2155430209/tls.crt::/tmp/serving-cert-2155430209/tls.key\\\\\\\"\\\\nI0127 07:51:55.207010 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0127 07:51:55.214027 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0127 07:51:55.214056 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0127 07:51:55.214085 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0127 07:51:55.214092 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0127 07:51:55.221404 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0127 07:51:55.221608 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0127 07:51:55.221697 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0127 07:51:55.221768 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0127 07:51:55.221825 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0127 07:51:55.221877 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0127 07:51:55.221926 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0127 07:51:55.221414 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0127 07:51:55.222961 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-27T07:51:49Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f3424e1f77a68fac6a8ccd12e018ce28850817c99e72cef7edbff0941124c46b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c37afd8d2562157729aca3685d7abae2bb6b9e081c2b456706dde875fd762c07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c37afd8d2562157729aca3685d7abae2bb6b9e081c2b456706dde875fd762c07\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:51:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:35Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:01Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:01 crc kubenswrapper[4787]: I0127 07:52:01.736795 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:01Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:01 crc kubenswrapper[4787]: I0127 07:52:01.750726 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-fghc7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6cac1bb0-6b6f-442f-9db4-a25d4f194bb1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://825d098d784c9e43aa1c5b9014dd290f1a23ddf0651dfd603c634682a8f05da2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c8zvj\\\",\\\"readOnly\\\":true,\\\"recu
rsiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:59Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-fghc7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:01Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:01 crc kubenswrapper[4787]: I0127 07:52:01.767815 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:01 crc kubenswrapper[4787]: I0127 07:52:01.767867 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:01 crc kubenswrapper[4787]: I0127 07:52:01.767882 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:01 crc kubenswrapper[4787]: I0127 07:52:01.767903 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:01 crc kubenswrapper[4787]: I0127 07:52:01.767919 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:01Z","lastTransitionTime":"2026-01-27T07:52:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:01 crc kubenswrapper[4787]: I0127 07:52:01.871858 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:01 crc kubenswrapper[4787]: I0127 07:52:01.871904 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:01 crc kubenswrapper[4787]: I0127 07:52:01.871914 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:01 crc kubenswrapper[4787]: I0127 07:52:01.871932 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:01 crc kubenswrapper[4787]: I0127 07:52:01.871943 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:01Z","lastTransitionTime":"2026-01-27T07:52:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:52:01 crc kubenswrapper[4787]: I0127 07:52:01.976989 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:01 crc kubenswrapper[4787]: I0127 07:52:01.977574 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:01 crc kubenswrapper[4787]: I0127 07:52:01.977596 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:01 crc kubenswrapper[4787]: I0127 07:52:01.977620 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:01 crc kubenswrapper[4787]: I0127 07:52:01.977635 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:01Z","lastTransitionTime":"2026-01-27T07:52:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:02 crc kubenswrapper[4787]: I0127 07:52:02.040710 4787 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-13 17:55:48.648360372 +0000 UTC Jan 27 07:52:02 crc kubenswrapper[4787]: I0127 07:52:02.080504 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:02 crc kubenswrapper[4787]: I0127 07:52:02.080568 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:02 crc kubenswrapper[4787]: I0127 07:52:02.080579 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:02 crc kubenswrapper[4787]: I0127 07:52:02.080597 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:02 crc kubenswrapper[4787]: I0127 07:52:02.080610 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:02Z","lastTransitionTime":"2026-01-27T07:52:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:52:02 crc kubenswrapper[4787]: I0127 07:52:02.183275 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:02 crc kubenswrapper[4787]: I0127 07:52:02.183315 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:02 crc kubenswrapper[4787]: I0127 07:52:02.183325 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:02 crc kubenswrapper[4787]: I0127 07:52:02.183344 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:02 crc kubenswrapper[4787]: I0127 07:52:02.183357 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:02Z","lastTransitionTime":"2026-01-27T07:52:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:02 crc kubenswrapper[4787]: I0127 07:52:02.287926 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:02 crc kubenswrapper[4787]: I0127 07:52:02.288592 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:02 crc kubenswrapper[4787]: I0127 07:52:02.288604 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:02 crc kubenswrapper[4787]: I0127 07:52:02.288624 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:02 crc kubenswrapper[4787]: I0127 07:52:02.288662 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:02Z","lastTransitionTime":"2026-01-27T07:52:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:52:02 crc kubenswrapper[4787]: I0127 07:52:02.292307 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7642m" event={"ID":"fa44405c-042c-485a-ab6c-912dcd377751","Type":"ContainerStarted","Data":"0bcc1a1d9f6e5bd0d8cf9b36441460debd839f92291531175eb05db5070136e8"} Jan 27 07:52:02 crc kubenswrapper[4787]: I0127 07:52:02.292356 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7642m" event={"ID":"fa44405c-042c-485a-ab6c-912dcd377751","Type":"ContainerStarted","Data":"2abb29a77d0081b70495701e68f0a5a9b80656eab59ae8de10fab1450a1df49f"} Jan 27 07:52:02 crc kubenswrapper[4787]: I0127 07:52:02.292372 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7642m" event={"ID":"fa44405c-042c-485a-ab6c-912dcd377751","Type":"ContainerStarted","Data":"b5f54aa358ddf7d9fe76fd45c4e3332ba2ef1fa4da77c6f38ae29209c4339a80"} Jan 27 07:52:02 crc kubenswrapper[4787]: I0127 07:52:02.292386 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7642m" event={"ID":"fa44405c-042c-485a-ab6c-912dcd377751","Type":"ContainerStarted","Data":"fbc588f712cb0167029fc80f924f52841fb904738bbcf774c53ca15a2642ef9c"} Jan 27 07:52:02 crc kubenswrapper[4787]: I0127 07:52:02.292399 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7642m" event={"ID":"fa44405c-042c-485a-ab6c-912dcd377751","Type":"ContainerStarted","Data":"fc1a27dd4120a4eafd6bfbd908f9fdbc80e9b006afafb509399f2b657b129b2c"} Jan 27 07:52:02 crc kubenswrapper[4787]: I0127 07:52:02.294570 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-fqb6r" event={"ID":"0ff88955-7cd9-4af4-9e0e-79614a1d2994","Type":"ContainerStarted","Data":"a0b5a15ba402c52fc6f3cd5d882f597c90ea8d1ab4e836f0e0998a301126f5a6"} Jan 27 07:52:02 crc kubenswrapper[4787]: I0127 07:52:02.310520 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-fqb6r" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0ff88955-7cd9-4af4-9e0e-79614a1d2994\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5e7833b50fe96981aa0ed63aa03b7078def1c809e60092a901a8b7ebaee78ad8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5e7833b50fe96981aa0ed63aa03b7078def1c809e60092a901a8b7ebaee78ad8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:52:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a0b5a15ba402c52fc6f3cd5d882f597c90ea8d1ab4e836f0e0998a301126f5a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64
b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-fqb6r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call 
webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:02Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:02 crc kubenswrapper[4787]: I0127 07:52:02.335351 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7279787f-4f29-4eae-a213-b7ea5d579b93\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://486c1b541ff6adc6a3cdab348fbdd7fbcf1c202c1a474e5e5db37bbcfb38fe06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://47800c35810b19642ee37e41f20900bef93d843d1d7e2219547dde7bf88cc7dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://85684c09dd546fc9a972aa45da092b22d794a5588140a451becfbe962be82b76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\
\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0aa5c186d3ea5f41540e7450b9ee5a5bbfe8359762ab9aa219a4e4bb26fb0a94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9717409dfdeb0d26d9a1c28a651d1feddfd4aaa11a9bc7db7206fbf8c6400c22\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b0fb183cca1d2f6f6f5bd9c80107e85fe0f5f39ea985a036ba115836e45d7cc2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b0fb183cca1d2f6f6f5bd9c80107e85fe0f5f39ea985a036ba115836e45d7cc2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:51:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a9bb746c11b2a67e014815e16d6765a1c86a3d2c156cae83853881bdb988507c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":f
alse,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a9bb746c11b2a67e014815e16d6765a1c86a3d2c156cae83853881bdb988507c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://1fa78137ccc23e32f6eb838abf3991ca3d99b8349721f1419c892a7f8795f55f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1fa78137ccc23e32f6eb838abf3991ca3d99b8349721f1419c892a7f8795f55f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:51:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:35Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:02Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:02 crc kubenswrapper[4787]: I0127 07:52:02.347517 4787 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/node-ca-rqwfc"] Jan 27 07:52:02 crc kubenswrapper[4787]: I0127 07:52:02.348297 4787 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/node-ca-rqwfc" Jan 27 07:52:02 crc kubenswrapper[4787]: I0127 07:52:02.355585 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"openshift-service-ca.crt" Jan 27 07:52:02 crc kubenswrapper[4787]: I0127 07:52:02.355950 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7a18f3e6327858658093c392b8b7de18ec6a1085c08ae31ae2f2dc371555011a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:02Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:02 crc kubenswrapper[4787]: I0127 07:52:02.357601 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"node-ca-dockercfg-4777p" Jan 27 07:52:02 crc kubenswrapper[4787]: I0127 07:52:02.357890 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"image-registry-certificates" Jan 27 07:52:02 crc kubenswrapper[4787]: I0127 07:52:02.358017 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"kube-root-ca.crt" Jan 27 07:52:02 crc kubenswrapper[4787]: I0127 07:52:02.372830 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:02Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:02 crc kubenswrapper[4787]: I0127 07:52:02.388304 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:02Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:02 crc kubenswrapper[4787]: I0127 07:52:02.393029 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:02 crc kubenswrapper[4787]: I0127 07:52:02.393084 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:02 crc kubenswrapper[4787]: I0127 07:52:02.393099 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:02 crc kubenswrapper[4787]: I0127 07:52:02.393120 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:02 crc kubenswrapper[4787]: I0127 07:52:02.393135 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:02Z","lastTransitionTime":"2026-01-27T07:52:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
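[Editor's note] The NodeNotReady condition recorded here is a separate symptom from the webhook failures: the kubelet reports the container runtime network as not ready because no CNI configuration file exists yet under /etc/kubernetes/cni/net.d/, and the network pods shown above (multus-additional-cni-plugins, ovnkube-node) are still initializing. A minimal sketch of that check, assuming only the directory quoted in the message and the usual CNI config extensions, might look like:

    // Minimal sketch, assuming only the directory named in the NotReady message
    // above: report whether any CNI configuration file is present yet in
    // /etc/kubernetes/cni/net.d/, which is what the container runtime's
    // network-ready status (surfaced by the kubelet above) is waiting for.
    package main

    import (
        "fmt"
        "log"
        "os"
        "path/filepath"
    )

    func main() {
        dir := "/etc/kubernetes/cni/net.d" // directory quoted in the kubelet message
        entries, err := os.ReadDir(dir)
        if err != nil {
            log.Fatalf("cannot read %s: %v", dir, err)
        }
        found := 0
        for _, e := range entries {
            switch filepath.Ext(e.Name()) {
            case ".conf", ".conflist", ".json":
                fmt.Println("found CNI config:", filepath.Join(dir, e.Name()))
                found++
            }
        }
        if found == 0 {
            fmt.Println("no CNI configuration files yet; the node stays NotReady until the network plugin writes one")
        }
    }

Once the network pods above finish starting and a configuration file appears in that directory, the node's Ready condition would be expected to flip back on its own.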
Has your network provider started?"} Jan 27 07:52:02 crc kubenswrapper[4787]: I0127 07:52:02.402949 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-rqjpz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e6f78168-0b0d-464d-b1c7-00bb9a69c0d1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e73805a73ffd2c2028fa682daaf20e295ecd2f2d7e24bb9b897883f18c3c514e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tz7b2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-rqjpz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:02Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:02 crc kubenswrapper[4787]: I0127 07:52:02.424828 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7642m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fa44405c-042c-485a-ab6c-912dcd377751\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://199ad62dee1b7ba9f8af9b2f3fcd77db94e8ba7dbfe2d011610c47358c17126b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://199ad62dee1b7ba9f8af9b2f3fcd77db94e8ba7dbfe2d011610c47358c17126b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:52:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:59Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-7642m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:02Z 
is after 2025-08-24T17:21:41Z" Jan 27 07:52:02 crc kubenswrapper[4787]: I0127 07:52:02.441452 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2974b5846d4b9e95a3eab849f160c5b33393ff6b1bf1275bc6476ec80807d632\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4426d84c4375c7c659c77049bbd6a720bf004fa5e7780b1e8739a2620c3667f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:02Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:02 crc kubenswrapper[4787]: I0127 07:52:02.449496 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: 
\"kubernetes.io/host-path/fe88a860-6bb6-40ef-8ed7-c16f06fad8d3-host\") pod \"node-ca-rqwfc\" (UID: \"fe88a860-6bb6-40ef-8ed7-c16f06fad8d3\") " pod="openshift-image-registry/node-ca-rqwfc" Jan 27 07:52:02 crc kubenswrapper[4787]: I0127 07:52:02.449571 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/fe88a860-6bb6-40ef-8ed7-c16f06fad8d3-serviceca\") pod \"node-ca-rqwfc\" (UID: \"fe88a860-6bb6-40ef-8ed7-c16f06fad8d3\") " pod="openshift-image-registry/node-ca-rqwfc" Jan 27 07:52:02 crc kubenswrapper[4787]: I0127 07:52:02.449747 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-scmbf\" (UniqueName: \"kubernetes.io/projected/fe88a860-6bb6-40ef-8ed7-c16f06fad8d3-kube-api-access-scmbf\") pod \"node-ca-rqwfc\" (UID: \"fe88a860-6bb6-40ef-8ed7-c16f06fad8d3\") " pod="openshift-image-registry/node-ca-rqwfc" Jan 27 07:52:02 crc kubenswrapper[4787]: I0127 07:52:02.455893 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1d5a64cdfd5f8aff93294f92aebc9ccf8fa2a4063e347fce2e35604d27651d9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:02Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:02 crc kubenswrapper[4787]: I0127 07:52:02.468674 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-q4fh5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f051e184-acac-47cf-9e04-9df648288715\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cf36b99a3389bdbbca8142539870671f6be61df22503df198f6255937e619950\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zvg7g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8829867d627dfed47b988a93a9ac3e381c0bf59794b65f8e1c1e821838477748\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zvg7g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:59Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-q4fh5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:02Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:02 crc kubenswrapper[4787]: I0127 07:52:02.484184 4787 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9bef4a57-904d-47b1-baa1-224c5c9f2b1a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://99c0fb5499a4bed619d003da60f83076e413fc5bda47a12d42770f567deeeec8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c114c2274358e777b240765cca81ea54a2e334002317a86595b7bfec95a11fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c470d3edd878f69c48f1457780557dd56300c0fe69342c479f1922dca856bceb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bba5d04b33d6209086784221118
4f53bc23cf78b90a4c6709792639b74d1cf0d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:35Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:02Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:02 crc kubenswrapper[4787]: I0127 07:52:02.496003 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:02 crc kubenswrapper[4787]: I0127 07:52:02.496051 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:02 crc kubenswrapper[4787]: I0127 07:52:02.496062 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:02 crc kubenswrapper[4787]: I0127 07:52:02.496115 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:02 crc kubenswrapper[4787]: I0127 07:52:02.496127 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:02Z","lastTransitionTime":"2026-01-27T07:52:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:52:02 crc kubenswrapper[4787]: I0127 07:52:02.499980 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"60eef58e-b2eb-43d9-a499-317083a89ca3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://35e90c0899d9ae9ea7cce3f5904955f1f3a80e147efbd766e9ed3b34014f40df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://53dd18ffb0f39abccd8edc121977448f4c491f6be9acee866dde8cbeb7c52ab5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://af5452f3980a8a4614721cb60fffffb3b1f3f5d8d82928249857ed2dd3fb5986\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartC
ount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://879d855210b326ad3cbb964024c0b48b7373519deae8b974b9f5864416c15ef3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://879d855210b326ad3cbb964024c0b48b7373519deae8b974b9f5864416c15ef3\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"message\\\":\\\"le observer\\\\nW0127 07:51:54.984485 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0127 07:51:54.984680 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0127 07:51:54.985504 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2155430209/tls.crt::/tmp/serving-cert-2155430209/tls.key\\\\\\\"\\\\nI0127 07:51:55.207010 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0127 07:51:55.214027 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0127 07:51:55.214056 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0127 07:51:55.214085 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0127 07:51:55.214092 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0127 07:51:55.221404 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0127 07:51:55.221608 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0127 07:51:55.221697 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0127 07:51:55.221768 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0127 07:51:55.221825 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0127 07:51:55.221877 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0127 07:51:55.221926 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0127 07:51:55.221414 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0127 07:51:55.222961 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-27T07:51:49Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f3424e1f77a68fac6a8ccd12e018ce28850817c99e72cef7edbff0941124c46b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c37afd8d2562157729aca3685d7abae2bb6b9e081c2b456706dde875fd762c07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c37afd8d2562157729aca3685d7abae2bb6b9e081c2b456706dde875fd762c07\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:51:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:35Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:02Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:02 crc kubenswrapper[4787]: I0127 07:52:02.513110 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:02Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:02 crc kubenswrapper[4787]: I0127 07:52:02.524989 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-fghc7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6cac1bb0-6b6f-442f-9db4-a25d4f194bb1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://825d098d784c9e43aa1c5b9014dd290f1a23ddf0651dfd603c634682a8f05da2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c8zvj\\\",\\\"readOnly\\\":true,\\\"recu
rsiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:59Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-fghc7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:02Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:02 crc kubenswrapper[4787]: I0127 07:52:02.544240 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7642m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fa44405c-042c-485a-ab6c-912dcd377751\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://199ad62dee1b7ba9f8af9b2f3fcd77db94e8ba7dbfe2d011610c47358c17126b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://199ad62dee1b7ba9f8af9b2f3fcd77db94e8ba7dbfe2d011610c47358c17126b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:52:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:59Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-7642m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:02Z 
is after 2025-08-24T17:21:41Z" Jan 27 07:52:02 crc kubenswrapper[4787]: I0127 07:52:02.551380 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/fe88a860-6bb6-40ef-8ed7-c16f06fad8d3-host\") pod \"node-ca-rqwfc\" (UID: \"fe88a860-6bb6-40ef-8ed7-c16f06fad8d3\") " pod="openshift-image-registry/node-ca-rqwfc" Jan 27 07:52:02 crc kubenswrapper[4787]: I0127 07:52:02.551430 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/fe88a860-6bb6-40ef-8ed7-c16f06fad8d3-serviceca\") pod \"node-ca-rqwfc\" (UID: \"fe88a860-6bb6-40ef-8ed7-c16f06fad8d3\") " pod="openshift-image-registry/node-ca-rqwfc" Jan 27 07:52:02 crc kubenswrapper[4787]: I0127 07:52:02.551481 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-scmbf\" (UniqueName: \"kubernetes.io/projected/fe88a860-6bb6-40ef-8ed7-c16f06fad8d3-kube-api-access-scmbf\") pod \"node-ca-rqwfc\" (UID: \"fe88a860-6bb6-40ef-8ed7-c16f06fad8d3\") " pod="openshift-image-registry/node-ca-rqwfc" Jan 27 07:52:02 crc kubenswrapper[4787]: I0127 07:52:02.551863 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/fe88a860-6bb6-40ef-8ed7-c16f06fad8d3-host\") pod \"node-ca-rqwfc\" (UID: \"fe88a860-6bb6-40ef-8ed7-c16f06fad8d3\") " pod="openshift-image-registry/node-ca-rqwfc" Jan 27 07:52:02 crc kubenswrapper[4787]: I0127 07:52:02.552772 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/fe88a860-6bb6-40ef-8ed7-c16f06fad8d3-serviceca\") pod \"node-ca-rqwfc\" (UID: \"fe88a860-6bb6-40ef-8ed7-c16f06fad8d3\") " pod="openshift-image-registry/node-ca-rqwfc" Jan 27 07:52:02 crc kubenswrapper[4787]: I0127 07:52:02.559177 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2974b5846d4b9e95a3eab849f160c5b33393ff6b1bf1275bc6476ec80807d632\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4426d84c4375c7c659c77049bbd6a720bf004fa5e7780b1e8739a2620c3667f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:02Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:02 crc kubenswrapper[4787]: I0127 07:52:02.572186 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1d5a64cdfd5f8aff93294f92aebc9ccf8fa2a4063e347fce2e35604d27651d9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:02Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:02 crc kubenswrapper[4787]: I0127 07:52:02.577294 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-scmbf\" (UniqueName: \"kubernetes.io/projected/fe88a860-6bb6-40ef-8ed7-c16f06fad8d3-kube-api-access-scmbf\") pod \"node-ca-rqwfc\" (UID: \"fe88a860-6bb6-40ef-8ed7-c16f06fad8d3\") " pod="openshift-image-registry/node-ca-rqwfc" Jan 27 07:52:02 crc kubenswrapper[4787]: I0127 07:52:02.584659 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-q4fh5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f051e184-acac-47cf-9e04-9df648288715\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cf36b99a3389bdbbca8142539870671f6be61df22503df198f6255937e619950\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zvg7g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8829867d627dfed47b988a93a9ac3e381c0bf59794b65f8e1c1e821838477748\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zvg7g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:59Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-q4fh5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:02Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:02 crc kubenswrapper[4787]: I0127 07:52:02.595895 4787 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-image-registry/node-ca-rqwfc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fe88a860-6bb6-40ef-8ed7-c16f06fad8d3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:02Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:02Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-scmbf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:52:02Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-rqwfc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:02Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:02 crc kubenswrapper[4787]: I0127 07:52:02.598693 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:02 crc kubenswrapper[4787]: I0127 07:52:02.598737 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:02 crc kubenswrapper[4787]: I0127 07:52:02.598750 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:02 crc kubenswrapper[4787]: I0127 07:52:02.598769 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:02 crc kubenswrapper[4787]: I0127 07:52:02.598780 4787 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:02Z","lastTransitionTime":"2026-01-27T07:52:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:02 crc kubenswrapper[4787]: I0127 07:52:02.610329 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9bef4a57-904d-47b1-baa1-224c5c9f2b1a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://99c0fb5499a4bed619d003da60f83076e413fc5bda47a12d42770f567deeeec8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c114c2274358e777b240765cca81ea54a2e334002317a86595b7bfec95a11fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c470d3edd878f69c48f1457780557dd56300c0fe69342c479f1922dca856bceb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastS
tate\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bba5d04b33d62090867842211184f53bc23cf78b90a4c6709792639b74d1cf0d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:35Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:02Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:02 crc kubenswrapper[4787]: I0127 07:52:02.625603 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"60eef58e-b2eb-43d9-a499-317083a89ca3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://35e90c0899d9ae9ea7cce3f5904955f1f3a80e147efbd766e9ed3b34014f40df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://53dd18ffb0f39abccd8edc121977448f4c491f6be9acee866dde8cbeb7c52ab5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://af5452f3980a8a4614721cb60fffffb3b1f3f5d8d82928249857ed2dd3fb5986\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://879d855210b326ad3cbb964024c0b48b7373519deae8b974b9f5864416c15ef3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://879d855210b326ad3cbb964024c0b48b7373519deae8b974b9f5864416c15ef3\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"message\\\":\\\"le observer\\\\nW0127 07:51:54.984485 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0127 07:51:54.984680 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0127 07:51:54.985504 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2155430209/tls.crt::/tmp/serving-cert-2155430209/tls.key\\\\\\\"\\\\nI0127 07:51:55.207010 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0127 07:51:55.214027 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0127 07:51:55.214056 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0127 07:51:55.214085 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0127 07:51:55.214092 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0127 07:51:55.221404 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0127 07:51:55.221608 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0127 07:51:55.221697 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0127 07:51:55.221768 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0127 07:51:55.221825 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0127 07:51:55.221877 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0127 07:51:55.221926 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0127 07:51:55.221414 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0127 07:51:55.222961 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-27T07:51:49Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f3424e1f77a68fac6a8ccd12e018ce28850817c99e72cef7edbff0941124c46b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c37afd8d2562157729aca3685d7abae2bb6b9e081c2b456706dde875fd762c07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c37afd8d2562157729aca3685d7abae2bb6b9e081c2b456706dde875fd762c07\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:51:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:35Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:02Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:02 crc kubenswrapper[4787]: I0127 07:52:02.638077 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:02Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:02 crc kubenswrapper[4787]: I0127 07:52:02.648860 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-fghc7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6cac1bb0-6b6f-442f-9db4-a25d4f194bb1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://825d098d784c9e43aa1c5b9014dd290f1a23ddf0651dfd603c634682a8f05da2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c8zvj\\\",\\\"readOnly\\\":true,\\\"recu
rsiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:59Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-fghc7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:02Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:02 crc kubenswrapper[4787]: I0127 07:52:02.661648 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-rqjpz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e6f78168-0b0d-464d-b1c7-00bb9a69c0d1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e73805a73ffd2c2028fa682daaf20e295ecd2f2d7e24bb9b897883f18c3c514e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.
d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tz7b2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-rqjpz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:02Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:02 crc kubenswrapper[4787]: I0127 07:52:02.668508 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/node-ca-rqwfc" Jan 27 07:52:02 crc kubenswrapper[4787]: I0127 07:52:02.676902 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-fqb6r" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0ff88955-7cd9-4af4-9e0e-79614a1d2994\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5e7833b50fe96981aa0ed63aa03b7078def1c809e60092a901a8b7ebaee78ad8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5e7833b50fe96981aa0ed63aa03b7078def1c809e60092a901a8b7ebaee78ad8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:52:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a0b5a15ba402c52fc6f3cd5d882f597c90ea8d1ab4e836f0e0998a301126f5a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64
b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-fqb6r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call 
webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:02Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:02 crc kubenswrapper[4787]: W0127 07:52:02.681916 4787 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podfe88a860_6bb6_40ef_8ed7_c16f06fad8d3.slice/crio-15ca82168208a3627bfee0d952e096bff064168ebc2b72d0463c309c4b7bd155 WatchSource:0}: Error finding container 15ca82168208a3627bfee0d952e096bff064168ebc2b72d0463c309c4b7bd155: Status 404 returned error can't find the container with id 15ca82168208a3627bfee0d952e096bff064168ebc2b72d0463c309c4b7bd155 Jan 27 07:52:02 crc kubenswrapper[4787]: I0127 07:52:02.697218 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7279787f-4f29-4eae-a213-b7ea5d579b93\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://486c1b541ff6adc6a3cdab348fbdd7fbcf1c202c1a474e5e5db37bbcfb38fe06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://47800c35810b19642ee37e41f20900bef93d843d1d7e2219547dde7bf88cc7dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"c
ert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://85684c09dd546fc9a972aa45da092b22d794a5588140a451becfbe962be82b76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0aa5c186d3ea5f41540e7450b9ee5a5bbfe8359762ab9aa219a4e4bb26fb0a94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9717409dfdeb0d26d9a1c28a651d1feddfd4aaa11a9bc7db7206fbf8c6400c22\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b0fb183cca1d2f6f6f5bd9c80107e85fe0f5f39ea985a036ba115836e45d7cc2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b0fb183cca1d2f6f6f5bd9c80107e85fe0f5f39ea985a036ba115836e45d7cc2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:51:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\
\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a9bb746c11b2a67e014815e16d6765a1c86a3d2c156cae83853881bdb988507c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a9bb746c11b2a67e014815e16d6765a1c86a3d2c156cae83853881bdb988507c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://1fa78137ccc23e32f6eb838abf3991ca3d99b8349721f1419c892a7f8795f55f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1fa78137ccc23e32f6eb838abf3991ca3d99b8349721f1419c892a7f8795f55f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:51:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:35Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:02Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:02 crc kubenswrapper[4787]: I0127 07:52:02.701904 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:02 crc kubenswrapper[4787]: I0127 07:52:02.701966 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:02 crc kubenswrapper[4787]: I0127 07:52:02.701980 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:02 crc kubenswrapper[4787]: I0127 07:52:02.702025 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:02 crc kubenswrapper[4787]: I0127 07:52:02.702036 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:02Z","lastTransitionTime":"2026-01-27T07:52:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI 
configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:02 crc kubenswrapper[4787]: I0127 07:52:02.714287 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7a18f3e6327858658093c392b8b7de18ec6a1085c08ae31ae2f2dc371555011a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:02Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:02 crc kubenswrapper[4787]: I0127 07:52:02.731788 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:02Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:02 crc kubenswrapper[4787]: I0127 07:52:02.752497 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:02Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:02 crc kubenswrapper[4787]: I0127 07:52:02.753043 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 27 07:52:02 crc kubenswrapper[4787]: E0127 07:52:02.753146 4787 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-27 07:52:10.753122142 +0000 UTC m=+36.405477634 (durationBeforeRetry 8s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 27 07:52:02 crc kubenswrapper[4787]: I0127 07:52:02.753211 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 27 07:52:02 crc kubenswrapper[4787]: I0127 07:52:02.753244 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 27 07:52:02 crc kubenswrapper[4787]: I0127 07:52:02.753284 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 27 07:52:02 crc kubenswrapper[4787]: I0127 07:52:02.753310 4787 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 27 07:52:02 crc kubenswrapper[4787]: E0127 07:52:02.753443 4787 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 27 07:52:02 crc kubenswrapper[4787]: E0127 07:52:02.753498 4787 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-27 07:52:10.753485249 +0000 UTC m=+36.405840741 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 27 07:52:02 crc kubenswrapper[4787]: E0127 07:52:02.753968 4787 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 27 07:52:02 crc kubenswrapper[4787]: E0127 07:52:02.753987 4787 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 27 07:52:02 crc kubenswrapper[4787]: E0127 07:52:02.753999 4787 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 27 07:52:02 crc kubenswrapper[4787]: E0127 07:52:02.754024 4787 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-01-27 07:52:10.754016959 +0000 UTC m=+36.406372451 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 27 07:52:02 crc kubenswrapper[4787]: E0127 07:52:02.754054 4787 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Jan 27 07:52:02 crc kubenswrapper[4787]: E0127 07:52:02.754076 4787 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. 
No retries permitted until 2026-01-27 07:52:10.754067891 +0000 UTC m=+36.406423373 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Jan 27 07:52:02 crc kubenswrapper[4787]: E0127 07:52:02.754118 4787 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 27 07:52:02 crc kubenswrapper[4787]: E0127 07:52:02.754127 4787 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 27 07:52:02 crc kubenswrapper[4787]: E0127 07:52:02.754134 4787 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 27 07:52:02 crc kubenswrapper[4787]: E0127 07:52:02.754151 4787 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-01-27 07:52:10.754146003 +0000 UTC m=+36.406501495 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 27 07:52:02 crc kubenswrapper[4787]: I0127 07:52:02.804860 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:02 crc kubenswrapper[4787]: I0127 07:52:02.804897 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:02 crc kubenswrapper[4787]: I0127 07:52:02.804908 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:02 crc kubenswrapper[4787]: I0127 07:52:02.804926 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:02 crc kubenswrapper[4787]: I0127 07:52:02.804938 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:02Z","lastTransitionTime":"2026-01-27T07:52:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:52:02 crc kubenswrapper[4787]: I0127 07:52:02.908842 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:02 crc kubenswrapper[4787]: I0127 07:52:02.908911 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:02 crc kubenswrapper[4787]: I0127 07:52:02.908927 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:02 crc kubenswrapper[4787]: I0127 07:52:02.908952 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:02 crc kubenswrapper[4787]: I0127 07:52:02.908969 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:02Z","lastTransitionTime":"2026-01-27T07:52:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:03 crc kubenswrapper[4787]: I0127 07:52:03.012450 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:03 crc kubenswrapper[4787]: I0127 07:52:03.012500 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:03 crc kubenswrapper[4787]: I0127 07:52:03.012511 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:03 crc kubenswrapper[4787]: I0127 07:52:03.012530 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:03 crc kubenswrapper[4787]: I0127 07:52:03.012542 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:03Z","lastTransitionTime":"2026-01-27T07:52:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:03 crc kubenswrapper[4787]: I0127 07:52:03.041959 4787 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-13 13:40:58.558283305 +0000 UTC Jan 27 07:52:03 crc kubenswrapper[4787]: I0127 07:52:03.075705 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 27 07:52:03 crc kubenswrapper[4787]: I0127 07:52:03.075769 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 27 07:52:03 crc kubenswrapper[4787]: I0127 07:52:03.075705 4787 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 27 07:52:03 crc kubenswrapper[4787]: E0127 07:52:03.075940 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 27 07:52:03 crc kubenswrapper[4787]: E0127 07:52:03.076112 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 27 07:52:03 crc kubenswrapper[4787]: E0127 07:52:03.076245 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 27 07:52:03 crc kubenswrapper[4787]: I0127 07:52:03.114760 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:03 crc kubenswrapper[4787]: I0127 07:52:03.114802 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:03 crc kubenswrapper[4787]: I0127 07:52:03.114812 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:03 crc kubenswrapper[4787]: I0127 07:52:03.114827 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:03 crc kubenswrapper[4787]: I0127 07:52:03.114837 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:03Z","lastTransitionTime":"2026-01-27T07:52:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:52:03 crc kubenswrapper[4787]: I0127 07:52:03.218410 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:03 crc kubenswrapper[4787]: I0127 07:52:03.218463 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:03 crc kubenswrapper[4787]: I0127 07:52:03.218474 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:03 crc kubenswrapper[4787]: I0127 07:52:03.218495 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:03 crc kubenswrapper[4787]: I0127 07:52:03.218510 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:03Z","lastTransitionTime":"2026-01-27T07:52:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:03 crc kubenswrapper[4787]: I0127 07:52:03.304835 4787 generic.go:334] "Generic (PLEG): container finished" podID="0ff88955-7cd9-4af4-9e0e-79614a1d2994" containerID="a0b5a15ba402c52fc6f3cd5d882f597c90ea8d1ab4e836f0e0998a301126f5a6" exitCode=0 Jan 27 07:52:03 crc kubenswrapper[4787]: I0127 07:52:03.304913 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-fqb6r" event={"ID":"0ff88955-7cd9-4af4-9e0e-79614a1d2994","Type":"ContainerDied","Data":"a0b5a15ba402c52fc6f3cd5d882f597c90ea8d1ab4e836f0e0998a301126f5a6"} Jan 27 07:52:03 crc kubenswrapper[4787]: I0127 07:52:03.310218 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7642m" event={"ID":"fa44405c-042c-485a-ab6c-912dcd377751","Type":"ContainerStarted","Data":"108cdc389c2cbb9baeb538856e70ea1ca2bce5968eb4097700b3e71e5322258d"} Jan 27 07:52:03 crc kubenswrapper[4787]: I0127 07:52:03.311348 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-rqwfc" event={"ID":"fe88a860-6bb6-40ef-8ed7-c16f06fad8d3","Type":"ContainerStarted","Data":"f0dc7c2423b05f4eb857a5a6d8a5006cea055965d20a85e84659b369f9659eeb"} Jan 27 07:52:03 crc kubenswrapper[4787]: I0127 07:52:03.311372 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-rqwfc" event={"ID":"fe88a860-6bb6-40ef-8ed7-c16f06fad8d3","Type":"ContainerStarted","Data":"15ca82168208a3627bfee0d952e096bff064168ebc2b72d0463c309c4b7bd155"} Jan 27 07:52:03 crc kubenswrapper[4787]: I0127 07:52:03.321041 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:03 crc kubenswrapper[4787]: I0127 07:52:03.321066 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:03 crc kubenswrapper[4787]: I0127 07:52:03.321075 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:03 crc kubenswrapper[4787]: I0127 07:52:03.321089 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:03 crc kubenswrapper[4787]: I0127 07:52:03.321100 4787 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:03Z","lastTransitionTime":"2026-01-27T07:52:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:03 crc kubenswrapper[4787]: I0127 07:52:03.339108 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7642m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fa44405c-042c-485a-ab6c-912dcd377751\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://199ad62dee1b7ba9f8af9b2f3fcd77db94e8ba7dbfe2d011610c47358c17126b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://199ad62dee1b7ba9f8af9b2f3fcd77db94e8ba7dbfe2d011610c47358c17126b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:52:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:59Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-7642m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:03Z 
is after 2025-08-24T17:21:41Z" Jan 27 07:52:03 crc kubenswrapper[4787]: I0127 07:52:03.355411 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2974b5846d4b9e95a3eab849f160c5b33393ff6b1bf1275bc6476ec80807d632\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4426d84c4375c7c659c77049bbd6a720bf004fa5e7780b1e8739a2620c3667f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:03Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:03 crc kubenswrapper[4787]: I0127 07:52:03.369867 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1d5a64cdfd5f8aff93294f92aebc9ccf8fa2a4063e347fce2e35604d27651d9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:03Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:03 crc kubenswrapper[4787]: I0127 07:52:03.383288 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-q4fh5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f051e184-acac-47cf-9e04-9df648288715\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cf36b99a3389bdbbca8142539870671f6be61df22503df198f6255937e619950\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zvg7g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8829867d627dfed47b988a93a9ac3e381c0bf59794b65f8e1c1e821838477748\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zvg7g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:59Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-q4fh5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:03Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:03 crc kubenswrapper[4787]: I0127 07:52:03.398786 4787 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9bef4a57-904d-47b1-baa1-224c5c9f2b1a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://99c0fb5499a4bed619d003da60f83076e413fc5bda47a12d42770f567deeeec8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c114c2274358e777b240765cca81ea54a2e334002317a86595b7bfec95a11fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c470d3edd878f69c48f1457780557dd56300c0fe69342c479f1922dca856bceb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bba5d04b33d6209086784221118
4f53bc23cf78b90a4c6709792639b74d1cf0d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:35Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:03Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:03 crc kubenswrapper[4787]: I0127 07:52:03.412056 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"60eef58e-b2eb-43d9-a499-317083a89ca3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://35e90c0899d9ae9ea7cce3f5904955f1f3a80e147efbd766e9ed3b34014f40df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://53dd18ffb0f39abccd8edc121977448f4c491f6be9acee866dde8cbeb7c52ab5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://af5452f3980a8a4614721cb60fffffb3b1f3f5d8d82928249857ed2dd3fb5986\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://879d855210b326ad3cbb964024c0b48b7373519deae8b974b9f5864416c15ef3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://879d855210b326ad3cbb964024c0b48b7373519deae8b974b9f5864416c15ef3\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"message\\\":\\\"le observer\\\\nW0127 07:51:54.984485 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0127 07:51:54.984680 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0127 07:51:54.985504 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2155430209/tls.crt::/tmp/serving-cert-2155430209/tls.key\\\\\\\"\\\\nI0127 07:51:55.207010 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0127 07:51:55.214027 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0127 07:51:55.214056 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0127 07:51:55.214085 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0127 07:51:55.214092 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0127 07:51:55.221404 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0127 07:51:55.221608 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0127 07:51:55.221697 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0127 07:51:55.221768 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0127 07:51:55.221825 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0127 07:51:55.221877 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0127 07:51:55.221926 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0127 07:51:55.221414 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0127 07:51:55.222961 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-27T07:51:49Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f3424e1f77a68fac6a8ccd12e018ce28850817c99e72cef7edbff0941124c46b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c37afd8d2562157729aca3685d7abae2bb6b9e081c2b456706dde875fd762c07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c37afd8d2562157729aca3685d7abae2bb6b9e081c2b456706dde875fd762c07\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:51:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:35Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:03Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:03 crc kubenswrapper[4787]: I0127 07:52:03.424253 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:03Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:03 crc kubenswrapper[4787]: I0127 07:52:03.425576 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:03 crc kubenswrapper[4787]: I0127 07:52:03.425625 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:03 crc kubenswrapper[4787]: I0127 07:52:03.425635 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:03 crc kubenswrapper[4787]: I0127 07:52:03.425653 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:03 crc kubenswrapper[4787]: I0127 07:52:03.425663 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:03Z","lastTransitionTime":"2026-01-27T07:52:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:52:03 crc kubenswrapper[4787]: I0127 07:52:03.435617 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-fghc7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6cac1bb0-6b6f-442f-9db4-a25d4f194bb1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://825d098d784c9e43aa1c5b9014dd290f1a23ddf0651dfd603c634682a8f05da2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c8zvj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:59Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-fghc7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:03Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:03 crc kubenswrapper[4787]: I0127 07:52:03.445585 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-rqwfc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fe88a860-6bb6-40ef-8ed7-c16f06fad8d3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:02Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:02Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-scmbf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:52:02Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-rqwfc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:03Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:03 crc kubenswrapper[4787]: I0127 07:52:03.471107 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7279787f-4f29-4eae-a213-b7ea5d579b93\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://486c1b541ff6adc6a3cdab348fbdd7fbcf1c202c1a474e5e5db37bbcfb38fe06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://47800c35810b19642ee37e41f20900bef93d843d1d7e2219547dde7bf88cc7dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://85684c09dd546fc9a972aa45da092b22d794a5588140a451becfbe962be82b76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0aa5c186d3ea5f41540e7450b9ee5a5bbfe8359
762ab9aa219a4e4bb26fb0a94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9717409dfdeb0d26d9a1c28a651d1feddfd4aaa11a9bc7db7206fbf8c6400c22\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b0fb183cca1d2f6f6f5bd9c80107e85fe0f5f39ea985a036ba115836e45d7cc2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b0fb183cca1d2f6f6f5bd9c80107e85fe0f5f39ea985a036ba115836e45d7cc2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:51:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a9bb746c11b2a67e014815e16d6765a1c86a3d2c156cae83853881bdb988507c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a9bb746c11b2a67e014815e16d6765a1c86a3d2c156cae83853881bdb988507c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://1fa78137ccc23e32f6eb838abf3991ca3d99b8349721f1419c892a7f8795f55f\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1fa78137ccc23e32f6eb838abf3991ca3d99b8349721f1419c892a7f8795f55f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:51:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:35Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:03Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:03 crc kubenswrapper[4787]: I0127 07:52:03.491329 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7a18f3e6327858658093c392b8b7de18ec6a1085c08ae31ae2f2dc371555011a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:03Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:03 crc kubenswrapper[4787]: I0127 07:52:03.505600 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:03Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:03 crc kubenswrapper[4787]: I0127 07:52:03.526097 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:03Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:03 crc kubenswrapper[4787]: I0127 07:52:03.527765 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:03 crc kubenswrapper[4787]: I0127 07:52:03.527802 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:03 crc kubenswrapper[4787]: I0127 07:52:03.527812 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:03 crc kubenswrapper[4787]: I0127 07:52:03.527830 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:03 crc kubenswrapper[4787]: I0127 07:52:03.527841 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:03Z","lastTransitionTime":"2026-01-27T07:52:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:52:03 crc kubenswrapper[4787]: I0127 07:52:03.543794 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-rqjpz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e6f78168-0b0d-464d-b1c7-00bb9a69c0d1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e73805a73ffd2c2028fa682daaf20e295ecd2f2d7e24bb9b897883f18c3c514e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tz7b2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-rqjpz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:03Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:03 crc kubenswrapper[4787]: I0127 07:52:03.561463 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-fqb6r" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0ff88955-7cd9-4af4-9e0e-79614a1d2994\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"message\\\":\\\"containers with incomplete status: [bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5e7833b50fe96981aa0ed63aa03b7078def1c809e60092a901a8b7ebaee78ad8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5e7833b50fe96981aa0ed63aa03b7078def1c809e60092a901a8b7ebaee78ad8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:52:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a0b5a15ba402c52fc6f3cd5d882f597c90ea8d1ab4e836f0e0998a301126f5a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a0b5a15ba402c52fc6f3cd5d882f597c90ea8d1ab4e836f0e0998a301126f5a6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:52:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:52:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-
27T07:51:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-fqb6r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:03Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:03 crc kubenswrapper[4787]: I0127 07:52:03.585886 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7642m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fa44405c-042c-485a-ab6c-912dcd377751\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://199ad62dee1b7ba9f8af9b2f3fcd77db94e8ba7dbfe2d011610c47358c17126b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://199ad62dee1b7ba9f8af9b2f3fcd77db94e8ba7dbfe2d011610c47358c17126b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:52:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:59Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-7642m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:03Z 
is after 2025-08-24T17:21:41Z" Jan 27 07:52:03 crc kubenswrapper[4787]: I0127 07:52:03.608020 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2974b5846d4b9e95a3eab849f160c5b33393ff6b1bf1275bc6476ec80807d632\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4426d84c4375c7c659c77049bbd6a720bf004fa5e7780b1e8739a2620c3667f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:03Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:03 crc kubenswrapper[4787]: I0127 07:52:03.627652 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1d5a64cdfd5f8aff93294f92aebc9ccf8fa2a4063e347fce2e35604d27651d9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:03Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:03 crc kubenswrapper[4787]: I0127 07:52:03.630786 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:03 crc kubenswrapper[4787]: I0127 07:52:03.630840 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:03 crc kubenswrapper[4787]: I0127 07:52:03.630855 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:03 crc kubenswrapper[4787]: I0127 07:52:03.630876 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:03 crc kubenswrapper[4787]: I0127 07:52:03.630890 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:03Z","lastTransitionTime":"2026-01-27T07:52:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:52:03 crc kubenswrapper[4787]: I0127 07:52:03.642786 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-q4fh5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f051e184-acac-47cf-9e04-9df648288715\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cf36b99a3389bdbbca8142539870671f6be61df22503df198f6255937e619950\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zvg7g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8829867d627dfed47b988a93a9ac3e381c0bf59794b65f8e1c1e821838477748\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zvg7g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:59Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-q4fh5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:03Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:03 crc kubenswrapper[4787]: I0127 07:52:03.658781 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9bef4a57-904d-47b1-baa1-224c5c9f2b1a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://99c0fb5499a4bed619d003da60f83076e413fc5bda47a12d42770f567deeeec8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c114c2274358e777b240765cca81ea54a2e334002317a86595b7bfec95a11fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c470d3edd878f69c48f1457780557dd56300c0fe69342c479f1922dca856bceb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026
-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bba5d04b33d62090867842211184f53bc23cf78b90a4c6709792639b74d1cf0d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:35Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:03Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:03 crc kubenswrapper[4787]: I0127 07:52:03.673188 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"60eef58e-b2eb-43d9-a499-317083a89ca3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://35e90c0899d9ae9ea7cce3f5904955f1f3a80e147efbd766e9ed3b34014f40df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://53dd18ffb0f39abccd8edc121977448f4c491f6be9acee866dde8cbeb7c52ab5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://af5452f3980a8a4614721cb60fffffb3b1f3f5d8d82928249857ed2dd3fb5986\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://879d855210b326ad3cbb964024c0b48b7373519deae8b974b9f5864416c15ef3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://879d855210b326ad3cbb964024c0b48b7373519deae8b974b9f5864416c15ef3\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"message\\\":\\\"le observer\\\\nW0127 07:51:54.984485 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0127 07:51:54.984680 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0127 07:51:54.985504 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2155430209/tls.crt::/tmp/serving-cert-2155430209/tls.key\\\\\\\"\\\\nI0127 07:51:55.207010 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0127 07:51:55.214027 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0127 07:51:55.214056 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0127 07:51:55.214085 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0127 07:51:55.214092 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0127 07:51:55.221404 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0127 07:51:55.221608 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0127 07:51:55.221697 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0127 07:51:55.221768 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0127 07:51:55.221825 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0127 07:51:55.221877 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0127 07:51:55.221926 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0127 07:51:55.221414 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0127 07:51:55.222961 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-27T07:51:49Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f3424e1f77a68fac6a8ccd12e018ce28850817c99e72cef7edbff0941124c46b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c37afd8d2562157729aca3685d7abae2bb6b9e081c2b456706dde875fd762c07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c37afd8d2562157729aca3685d7abae2bb6b9e081c2b456706dde875fd762c07\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:51:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:35Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:03Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:03 crc kubenswrapper[4787]: I0127 07:52:03.688111 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:03Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:03 crc kubenswrapper[4787]: I0127 07:52:03.700082 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-fghc7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6cac1bb0-6b6f-442f-9db4-a25d4f194bb1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://825d098d784c9e43aa1c5b9014dd290f1a23ddf0651dfd603c634682a8f05da2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c8zvj\\\",\\\"readOnly\\\":true,\\\"recu
rsiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:59Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-fghc7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:03Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:03 crc kubenswrapper[4787]: I0127 07:52:03.710419 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-rqwfc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fe88a860-6bb6-40ef-8ed7-c16f06fad8d3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f0dc7c2423b05f4eb857a5a6d8a5006cea055965d20a85e84659b369f9659eeb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-scmbf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:52:02Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-rqwfc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:03Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:03 crc kubenswrapper[4787]: I0127 07:52:03.733884 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Jan 27 07:52:03 crc kubenswrapper[4787]: I0127 07:52:03.733935 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:03 crc kubenswrapper[4787]: I0127 07:52:03.733946 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:03 crc kubenswrapper[4787]: I0127 07:52:03.733968 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:03 crc kubenswrapper[4787]: I0127 07:52:03.733981 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:03Z","lastTransitionTime":"2026-01-27T07:52:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:03 crc kubenswrapper[4787]: I0127 07:52:03.754808 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7279787f-4f29-4eae-a213-b7ea5d579b93\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://486c1b541ff6adc6a3cdab348fbdd7fbcf1c202c1a474e5e5db37bbcfb38fe06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://47800c35810b19642ee37e41f20900bef93d843d1d7e2219547dde7bf88cc7dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"sta
rted\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://85684c09dd546fc9a972aa45da092b22d794a5588140a451becfbe962be82b76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0aa5c186d3ea5f41540e7450b9ee5a5bbfe8359762ab9aa219a4e4bb26fb0a94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9717409dfdeb0d26d9a1c28a651d1feddfd4aaa11a9bc7db7206fbf8c6400c22\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b0fb183cca1d2f6f6f5bd9c80107e85fe0f5f39ea985a036ba115836e45d7cc2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":
\\\"cri-o://b0fb183cca1d2f6f6f5bd9c80107e85fe0f5f39ea985a036ba115836e45d7cc2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:51:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a9bb746c11b2a67e014815e16d6765a1c86a3d2c156cae83853881bdb988507c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a9bb746c11b2a67e014815e16d6765a1c86a3d2c156cae83853881bdb988507c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://1fa78137ccc23e32f6eb838abf3991ca3d99b8349721f1419c892a7f8795f55f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1fa78137ccc23e32f6eb838abf3991ca3d99b8349721f1419c892a7f8795f55f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:51:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:35Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:03Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:03 crc kubenswrapper[4787]: I0127 07:52:03.795897 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7a18f3e6327858658093c392b8b7de18ec6a1085c08ae31ae2f2dc371555011a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:03Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:03 crc kubenswrapper[4787]: I0127 07:52:03.829041 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:03Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:03 crc kubenswrapper[4787]: I0127 07:52:03.837343 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:03 crc kubenswrapper[4787]: I0127 07:52:03.837390 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:03 crc kubenswrapper[4787]: I0127 07:52:03.837405 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:03 crc kubenswrapper[4787]: I0127 07:52:03.837434 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:03 crc kubenswrapper[4787]: I0127 07:52:03.837449 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:03Z","lastTransitionTime":"2026-01-27T07:52:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:52:03 crc kubenswrapper[4787]: I0127 07:52:03.871480 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:03Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:03 crc kubenswrapper[4787]: I0127 07:52:03.910399 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-rqjpz" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e6f78168-0b0d-464d-b1c7-00bb9a69c0d1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e73805a73ffd2c2028fa682daaf20e295ecd2f2d7e24bb9b897883f18c3c514e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tz7b2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-rqjpz\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:03Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:03 crc kubenswrapper[4787]: I0127 07:52:03.940757 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:03 crc kubenswrapper[4787]: I0127 07:52:03.940830 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:03 crc kubenswrapper[4787]: I0127 07:52:03.940849 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:03 crc kubenswrapper[4787]: I0127 07:52:03.940876 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:03 crc kubenswrapper[4787]: I0127 07:52:03.940896 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:03Z","lastTransitionTime":"2026-01-27T07:52:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:03 crc kubenswrapper[4787]: I0127 07:52:03.960382 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-fqb6r" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0ff88955-7cd9-4af4-9e0e-79614a1d2994\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"message\\\":\\\"containers with incomplete status: [bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5e7833b50fe96981aa0ed63aa03b7078def1c809e60092a901a8b7ebaee78ad8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5e7833b50fe96981aa0ed63aa03b7078def1c809e60092a901a8b7ebaee78ad8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:52:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a0b5a15ba402c52fc6f3cd5d882f597c90ea8d1ab4e836f0e0998a301126f5a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a0b5a15ba402c52fc6f3cd5d882f597c90ea8d1ab4e836f0e0998a301126f5a6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:52:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:52:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-
27T07:51:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-fqb6r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:03Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:04 crc kubenswrapper[4787]: I0127 07:52:04.042305 4787 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-07 20:00:56.153231229 +0000 UTC Jan 27 07:52:04 crc kubenswrapper[4787]: I0127 07:52:04.045900 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:04 crc kubenswrapper[4787]: I0127 07:52:04.045965 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:04 crc kubenswrapper[4787]: I0127 07:52:04.045977 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:04 crc kubenswrapper[4787]: I0127 07:52:04.045995 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:04 crc kubenswrapper[4787]: I0127 07:52:04.046012 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:04Z","lastTransitionTime":"2026-01-27T07:52:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:04 crc kubenswrapper[4787]: I0127 07:52:04.149125 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:04 crc kubenswrapper[4787]: I0127 07:52:04.149181 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:04 crc kubenswrapper[4787]: I0127 07:52:04.149195 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:04 crc kubenswrapper[4787]: I0127 07:52:04.149212 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:04 crc kubenswrapper[4787]: I0127 07:52:04.149224 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:04Z","lastTransitionTime":"2026-01-27T07:52:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:52:04 crc kubenswrapper[4787]: I0127 07:52:04.252890 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:04 crc kubenswrapper[4787]: I0127 07:52:04.252976 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:04 crc kubenswrapper[4787]: I0127 07:52:04.252999 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:04 crc kubenswrapper[4787]: I0127 07:52:04.253028 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:04 crc kubenswrapper[4787]: I0127 07:52:04.253064 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:04Z","lastTransitionTime":"2026-01-27T07:52:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:04 crc kubenswrapper[4787]: I0127 07:52:04.324083 4787 generic.go:334] "Generic (PLEG): container finished" podID="0ff88955-7cd9-4af4-9e0e-79614a1d2994" containerID="82bb64425ded7611030cccfa7a439b14e545bf83bc48c762df144eddc7d08487" exitCode=0 Jan 27 07:52:04 crc kubenswrapper[4787]: I0127 07:52:04.324137 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-fqb6r" event={"ID":"0ff88955-7cd9-4af4-9e0e-79614a1d2994","Type":"ContainerDied","Data":"82bb64425ded7611030cccfa7a439b14e545bf83bc48c762df144eddc7d08487"} Jan 27 07:52:04 crc kubenswrapper[4787]: I0127 07:52:04.345224 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"60eef58e-b2eb-43d9-a499-317083a89ca3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://35e90c0899d9ae9ea7cce3f5904955f1f3a80e147efbd766e9ed3b34014f40df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://53dd18ffb0f39abccd8edc121977448f4c491f6be9acee866dde8cbeb7c52ab5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://af5452f3980a8a4614721cb60fffffb3b1f3f5d8d82928249857ed2dd3fb5986\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://879d855210b326ad3cbb964024c0b48b7373519deae8b974b9f5864416c15ef3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://879d855210b326ad3cbb964024c0b48b7373519deae8b974b9f5864416c15ef3\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"message\\\":\\\"le observer\\\\nW0127 07:51:54.984485 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0127 07:51:54.984680 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0127 07:51:54.985504 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2155430209/tls.crt::/tmp/serving-cert-2155430209/tls.key\\\\\\\"\\\\nI0127 07:51:55.207010 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0127 07:51:55.214027 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0127 07:51:55.214056 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0127 07:51:55.214085 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0127 07:51:55.214092 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0127 07:51:55.221404 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0127 07:51:55.221608 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0127 07:51:55.221697 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0127 07:51:55.221768 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0127 07:51:55.221825 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0127 07:51:55.221877 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0127 07:51:55.221926 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0127 07:51:55.221414 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0127 07:51:55.222961 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-27T07:51:49Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f3424e1f77a68fac6a8ccd12e018ce28850817c99e72cef7edbff0941124c46b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c37afd8d2562157729aca3685d7abae2bb6b9e081c2b456706dde875fd762c07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c37afd8d2562157729aca3685d7abae2bb6b9e081c2b456706dde875fd762c07\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:51:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:35Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:04Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:04 crc kubenswrapper[4787]: I0127 07:52:04.356970 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:04 crc kubenswrapper[4787]: I0127 07:52:04.357098 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:04 crc kubenswrapper[4787]: I0127 07:52:04.357126 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:04 crc kubenswrapper[4787]: I0127 07:52:04.357165 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:04 crc kubenswrapper[4787]: I0127 07:52:04.357193 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:04Z","lastTransitionTime":"2026-01-27T07:52:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:04 crc kubenswrapper[4787]: I0127 07:52:04.372656 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:04Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:04 crc kubenswrapper[4787]: I0127 07:52:04.390221 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-fghc7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6cac1bb0-6b6f-442f-9db4-a25d4f194bb1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://825d098d784c9e43aa1c5b9014dd290f1a23ddf0651dfd603c634682a8f05da2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c8zvj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:59Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-fghc7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:04Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:04 crc kubenswrapper[4787]: I0127 07:52:04.403400 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-rqwfc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fe88a860-6bb6-40ef-8ed7-c16f06fad8d3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f0dc7c2423b05f4eb857a5a6d8a5006cea055965d20a85e84659b369f9659eeb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-scmbf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:52:02Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-rqwfc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:04Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:04 crc kubenswrapper[4787]: I0127 07:52:04.419276 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9bef4a57-904d-47b1-baa1-224c5c9f2b1a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://99c0fb5499a4bed619d003da60f83076e413fc5bda47a12d42770f567deeeec8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c114c2274358e777b240765cca81ea54a2e334002317a86595b7bfec95a11fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c470d3edd878f69c48f1457780557dd56300c0fe69342c479f1922dca856bceb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bba5d04b33d62090867842211184f53bc23cf78b90a4c6709792639b74d1cf0d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:35Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:04Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:04 crc kubenswrapper[4787]: I0127 07:52:04.435407 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7a18f3e6327858658093c392b8b7de18ec6a1085c08ae31ae2f2dc371555011a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to 
verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:04Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:04 crc kubenswrapper[4787]: I0127 07:52:04.447973 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:04Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:04 crc kubenswrapper[4787]: I0127 07:52:04.460119 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:04 crc kubenswrapper[4787]: I0127 07:52:04.460164 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:04 crc kubenswrapper[4787]: I0127 07:52:04.460180 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:04 crc kubenswrapper[4787]: I0127 07:52:04.460204 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:04 crc kubenswrapper[4787]: I0127 07:52:04.460218 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:04Z","lastTransitionTime":"2026-01-27T07:52:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false 
reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:04 crc kubenswrapper[4787]: I0127 07:52:04.463690 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:04Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:04 crc kubenswrapper[4787]: I0127 07:52:04.477983 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-rqjpz" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e6f78168-0b0d-464d-b1c7-00bb9a69c0d1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e73805a73ffd2c2028fa682daaf20e295ecd2f2d7e24bb9b897883f18c3c514e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tz7b2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-rqjpz\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:04Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:04 crc kubenswrapper[4787]: I0127 07:52:04.496089 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-fqb6r" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0ff88955-7cd9-4af4-9e0e-79614a1d2994\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"message\\\":\\\"containers with incomplete status: [routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5e7833b50fe96981aa0ed63aa03b7078def1c809e60092a901a8b7ebaee78ad8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5e7833b50fe96981aa0ed63aa03b7078def1c809e60092a901a8b7ebaee78ad8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:52:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\
":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a0b5a15ba402c52fc6f3cd5d882f597c90ea8d1ab4e836f0e0998a301126f5a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a0b5a15ba402c52fc6f3cd5d882f597c90ea8d1ab4e836f0e0998a301126f5a6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:52:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:52:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://82bb64425ded7611030cccfa7a439b14e545bf83bc48c762df144eddc7d08487\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://82bb64425ded7611030cccfa7a439b14e545bf83bc48c762df144eddc7d08487\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:52:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:52:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary
-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-fqb6r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:04Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:04 crc kubenswrapper[4787]: I0127 07:52:04.521444 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7279787f-4f29-4eae-a213-b7ea5d579b93\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://486c1b541ff6adc6a3cdab348fbdd7fbcf1c202c1a474e5e5db37bbcfb38fe06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://47800c35810b19642ee37e41f20900bef93d843d1d7e2219547dde7bf88cc7dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://85684c09dd546fc9a972aa45da092b22d794a5588140a451becfbe962be82b76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0aa5c186d3ea5f41540e7450b9ee5a5bbfe8359
762ab9aa219a4e4bb26fb0a94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9717409dfdeb0d26d9a1c28a651d1feddfd4aaa11a9bc7db7206fbf8c6400c22\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b0fb183cca1d2f6f6f5bd9c80107e85fe0f5f39ea985a036ba115836e45d7cc2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b0fb183cca1d2f6f6f5bd9c80107e85fe0f5f39ea985a036ba115836e45d7cc2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:51:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a9bb746c11b2a67e014815e16d6765a1c86a3d2c156cae83853881bdb988507c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a9bb746c11b2a67e014815e16d6765a1c86a3d2c156cae83853881bdb988507c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://1fa78137ccc23e32f6eb838abf3991ca3d99b8349721f1419c892a7f8795f55f\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1fa78137ccc23e32f6eb838abf3991ca3d99b8349721f1419c892a7f8795f55f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:51:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:35Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:04Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:04 crc kubenswrapper[4787]: I0127 07:52:04.549328 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7642m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fa44405c-042c-485a-ab6c-912dcd377751\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://199ad62dee1b7ba9f8af9b2f3fcd77db94e8ba7dbfe2d011610c47358c17126b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://199ad62dee1b7ba9f8af9b2f3fcd77db94e8ba7dbfe2d011610c47358c17126b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:52:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:59Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-7642m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:04Z 
is after 2025-08-24T17:21:41Z" Jan 27 07:52:04 crc kubenswrapper[4787]: I0127 07:52:04.562999 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:04 crc kubenswrapper[4787]: I0127 07:52:04.563062 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:04 crc kubenswrapper[4787]: I0127 07:52:04.563114 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:04 crc kubenswrapper[4787]: I0127 07:52:04.563148 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:04 crc kubenswrapper[4787]: I0127 07:52:04.563170 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:04Z","lastTransitionTime":"2026-01-27T07:52:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:04 crc kubenswrapper[4787]: I0127 07:52:04.567142 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2974b5846d4b9e95a3eab849f160c5b33393ff6b1bf1275bc6476ec80807d632\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4426d84c4375c7c659c77049bbd6a720bf004fa5e7780b1e8739a2620c3667f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"
started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:04Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:04 crc kubenswrapper[4787]: I0127 07:52:04.584734 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1d5a64cdfd5f8aff93294f92aebc9ccf8fa2a4063e347fce2e35604d27651d9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:04Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:04 crc kubenswrapper[4787]: I0127 07:52:04.600917 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-q4fh5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f051e184-acac-47cf-9e04-9df648288715\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cf36b99a3389bdbbca8142539870671f6be61df22503df198f6255937e619950\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zvg7g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8829867d627dfed47b988a93a9ac3e381c0bf59794b65f8e1c1e821838477748\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zvg7g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:59Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-q4fh5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:04Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:04 crc kubenswrapper[4787]: I0127 07:52:04.666472 4787 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:04 crc kubenswrapper[4787]: I0127 07:52:04.666520 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:04 crc kubenswrapper[4787]: I0127 07:52:04.666532 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:04 crc kubenswrapper[4787]: I0127 07:52:04.666575 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:04 crc kubenswrapper[4787]: I0127 07:52:04.666589 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:04Z","lastTransitionTime":"2026-01-27T07:52:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:04 crc kubenswrapper[4787]: I0127 07:52:04.770271 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:04 crc kubenswrapper[4787]: I0127 07:52:04.770359 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:04 crc kubenswrapper[4787]: I0127 07:52:04.770371 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:04 crc kubenswrapper[4787]: I0127 07:52:04.770388 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:04 crc kubenswrapper[4787]: I0127 07:52:04.770400 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:04Z","lastTransitionTime":"2026-01-27T07:52:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:52:04 crc kubenswrapper[4787]: I0127 07:52:04.851351 4787 transport.go:147] "Certificate rotation detected, shutting down client connections to start using new credentials" Jan 27 07:52:04 crc kubenswrapper[4787]: I0127 07:52:04.879020 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:04 crc kubenswrapper[4787]: I0127 07:52:04.879054 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:04 crc kubenswrapper[4787]: I0127 07:52:04.879062 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:04 crc kubenswrapper[4787]: I0127 07:52:04.879080 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:04 crc kubenswrapper[4787]: I0127 07:52:04.879091 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:04Z","lastTransitionTime":"2026-01-27T07:52:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:04 crc kubenswrapper[4787]: I0127 07:52:04.983351 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:04 crc kubenswrapper[4787]: I0127 07:52:04.983402 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:04 crc kubenswrapper[4787]: I0127 07:52:04.983414 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:04 crc kubenswrapper[4787]: I0127 07:52:04.983436 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:04 crc kubenswrapper[4787]: I0127 07:52:04.983449 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:04Z","lastTransitionTime":"2026-01-27T07:52:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:05 crc kubenswrapper[4787]: I0127 07:52:05.042688 4787 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-07 21:55:45.754012847 +0000 UTC Jan 27 07:52:05 crc kubenswrapper[4787]: I0127 07:52:05.076805 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 27 07:52:05 crc kubenswrapper[4787]: I0127 07:52:05.076860 4787 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 27 07:52:05 crc kubenswrapper[4787]: E0127 07:52:05.077025 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 27 07:52:05 crc kubenswrapper[4787]: I0127 07:52:05.077113 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 27 07:52:05 crc kubenswrapper[4787]: E0127 07:52:05.077285 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 27 07:52:05 crc kubenswrapper[4787]: E0127 07:52:05.077418 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 27 07:52:05 crc kubenswrapper[4787]: I0127 07:52:05.088769 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:05 crc kubenswrapper[4787]: I0127 07:52:05.089179 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:05 crc kubenswrapper[4787]: I0127 07:52:05.089189 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:05 crc kubenswrapper[4787]: I0127 07:52:05.089206 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:05 crc kubenswrapper[4787]: I0127 07:52:05.089218 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:05Z","lastTransitionTime":"2026-01-27T07:52:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:52:05 crc kubenswrapper[4787]: I0127 07:52:05.105451 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7642m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fa44405c-042c-485a-ab6c-912dcd377751\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":fa
lse,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\
\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://199ad62dee1b7ba9f8af9b2f3fcd77db94e8ba7dbfe2d01161
0c47358c17126b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://199ad62dee1b7ba9f8af9b2f3fcd77db94e8ba7dbfe2d011610c47358c17126b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:52:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:59Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-7642m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:05Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:05 crc kubenswrapper[4787]: I0127 07:52:05.122536 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2974b5846d4b9e95a3eab849f160c5b33393ff6b1bf1275bc6476ec80807d632\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4426d84c4375c7c659c77049bbd6a720bf004fa5e7780b1e8739a2620c3667f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d
1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:05Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:05 crc kubenswrapper[4787]: I0127 07:52:05.137800 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1d5a64cdfd5f8aff93294f92aebc9ccf8fa2a4063e347fce2e35604d27651d9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:05Z is after 2025-08-24T17:21:41Z" Jan 27 
07:52:05 crc kubenswrapper[4787]: I0127 07:52:05.153128 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-q4fh5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f051e184-acac-47cf-9e04-9df648288715\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cf36b99a3389bdbbca8142539870671f6be61df22503df198f6255937e619950\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zvg7g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8829867d627dfed47b988a93a9ac3e381c0bf59794b65f8e1c1e821838477748\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zvg7g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:59Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-q4fh5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: 
certificate has expired or is not yet valid: current time 2026-01-27T07:52:05Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:05 crc kubenswrapper[4787]: I0127 07:52:05.166740 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-rqwfc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fe88a860-6bb6-40ef-8ed7-c16f06fad8d3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f0dc7c2423b05f4eb857a5a6d8a5006cea055965d20a85e84659b369f9659eeb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-scmbf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:52:02Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-rqwfc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:05Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:05 crc kubenswrapper[4787]: I0127 07:52:05.180987 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9bef4a57-904d-47b1-baa1-224c5c9f2b1a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://99c0fb5499a4bed619d003da60f83076e413fc5bda47a12d42770f567deeeec8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c114c2274358e777b240765cca81ea54a2e334002317a86595b7bfec95a11fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c470d3edd878f69c48f1457780557dd56300c0fe69342c479f1922dca856bceb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bba5d04b33d62090867842211184f53bc23cf78b90a4c6709792639b74d1cf0d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:35Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:05Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:05 crc kubenswrapper[4787]: I0127 07:52:05.191326 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:05 crc kubenswrapper[4787]: I0127 07:52:05.191668 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:05 crc kubenswrapper[4787]: I0127 07:52:05.191761 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:05 crc kubenswrapper[4787]: I0127 07:52:05.191913 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:05 crc kubenswrapper[4787]: I0127 07:52:05.192061 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:05Z","lastTransitionTime":"2026-01-27T07:52:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:52:05 crc kubenswrapper[4787]: I0127 07:52:05.199635 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"60eef58e-b2eb-43d9-a499-317083a89ca3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://35e90c0899d9ae9ea7cce3f5904955f1f3a80e147efbd766e9ed3b34014f40df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://53dd18ffb0f39abccd8edc121977448f4c491f6be9acee866dde8cbeb7c52ab5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://af5452f3980a8a4614721cb60fffffb3b1f3f5d8d82928249857ed2dd3fb5986\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartC
ount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://879d855210b326ad3cbb964024c0b48b7373519deae8b974b9f5864416c15ef3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://879d855210b326ad3cbb964024c0b48b7373519deae8b974b9f5864416c15ef3\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"message\\\":\\\"le observer\\\\nW0127 07:51:54.984485 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0127 07:51:54.984680 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0127 07:51:54.985504 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2155430209/tls.crt::/tmp/serving-cert-2155430209/tls.key\\\\\\\"\\\\nI0127 07:51:55.207010 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0127 07:51:55.214027 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0127 07:51:55.214056 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0127 07:51:55.214085 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0127 07:51:55.214092 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0127 07:51:55.221404 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0127 07:51:55.221608 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0127 07:51:55.221697 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0127 07:51:55.221768 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0127 07:51:55.221825 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0127 07:51:55.221877 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0127 07:51:55.221926 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0127 07:51:55.221414 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0127 07:51:55.222961 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-27T07:51:49Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f3424e1f77a68fac6a8ccd12e018ce28850817c99e72cef7edbff0941124c46b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c37afd8d2562157729aca3685d7abae2bb6b9e081c2b456706dde875fd762c07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c37afd8d2562157729aca3685d7abae2bb6b9e081c2b456706dde875fd762c07\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:51:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:35Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:05Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:05 crc kubenswrapper[4787]: I0127 07:52:05.214830 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:05Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:05 crc kubenswrapper[4787]: I0127 07:52:05.227496 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-fghc7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6cac1bb0-6b6f-442f-9db4-a25d4f194bb1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://825d098d784c9e43aa1c5b9014dd290f1a23ddf0651dfd603c634682a8f05da2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c8zvj\\\",\\\"readOnly\\\":true,\\\"recu
rsiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:59Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-fghc7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:05Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:05 crc kubenswrapper[4787]: I0127 07:52:05.243618 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-rqjpz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e6f78168-0b0d-464d-b1c7-00bb9a69c0d1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e73805a73ffd2c2028fa682daaf20e295ecd2f2d7e24bb9b897883f18c3c514e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.
d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tz7b2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-rqjpz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:05Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:05 crc kubenswrapper[4787]: I0127 07:52:05.260726 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-fqb6r" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0ff88955-7cd9-4af4-9e0e-79614a1d2994\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"message\\\":\\\"containers with incomplete status: [routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5e7833b50fe96981aa0ed63aa03b7078def1c809e60092a901a8b7ebaee78ad8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5e7833b50fe96981aa0ed63aa03b7078def1c809e60092a901a8b7ebaee78ad8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:52:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a0b5a15ba402c52fc6f3cd5d882f597c90ea8d1ab4e836f0e0998a301126f5a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a0b5a15ba402c52fc6f3cd5d882f597c90ea8d1ab4e836f0e0998a301126f5a6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:52:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:52:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://82bb64425ded7611030cccfa7a439b14e545bf83bc48c762df144eddc7d08487\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://82bb64425ded7611030cccfa7a439b14e545bf83bc48c762df144eddc7d08487\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:52:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:52:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/
cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-fqb6r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:05Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:05 crc kubenswrapper[4787]: I0127 07:52:05.282925 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7279787f-4f29-4eae-a213-b7ea5d579b93\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://486c1b541ff6adc6a3cdab348fbdd7fbcf1c202c1a474e5e5db37bbcfb38fe06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://47800c35810b19642ee37e41f20900bef93d843d1d7e2219547dde7bf88cc7dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resourc
e-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://85684c09dd546fc9a972aa45da092b22d794a5588140a451becfbe962be82b76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0aa5c186d3ea5f41540e7450b9ee5a5bbfe8359762ab9aa219a4e4bb26fb0a94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9717409dfdeb0d26d9a1c28a651d1feddfd4aaa11a9bc7db7206fbf8c6400c22\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b0fb183cca1d2f6f6f5bd9c80107e85fe0f5f39ea985a036ba115836e45d7cc2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b0fb183cca1d2f6f6f5bd9c80107e85fe0f5f39ea985a036ba115836e45d7cc2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:51:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"20
26-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a9bb746c11b2a67e014815e16d6765a1c86a3d2c156cae83853881bdb988507c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a9bb746c11b2a67e014815e16d6765a1c86a3d2c156cae83853881bdb988507c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://1fa78137ccc23e32f6eb838abf3991ca3d99b8349721f1419c892a7f8795f55f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1fa78137ccc23e32f6eb838abf3991ca3d99b8349721f1419c892a7f8795f55f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:51:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:35Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:05Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:05 crc kubenswrapper[4787]: I0127 07:52:05.297660 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:05 crc kubenswrapper[4787]: I0127 07:52:05.297743 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:05 crc kubenswrapper[4787]: I0127 07:52:05.297754 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:05 crc kubenswrapper[4787]: I0127 07:52:05.297772 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:05 crc kubenswrapper[4787]: I0127 07:52:05.297782 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:05Z","lastTransitionTime":"2026-01-27T07:52:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:05 crc kubenswrapper[4787]: I0127 07:52:05.297934 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7a18f3e6327858658093c392b8b7de18ec6a1085c08ae31ae2f2dc371555011a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:05Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:05 crc kubenswrapper[4787]: I0127 07:52:05.312647 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:05Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:05 crc kubenswrapper[4787]: I0127 07:52:05.328693 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:05Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:05 crc kubenswrapper[4787]: I0127 07:52:05.331649 4787 generic.go:334] "Generic (PLEG): container finished" podID="0ff88955-7cd9-4af4-9e0e-79614a1d2994" containerID="f9782b2a339d2dba1498f28ed3f7e6c4c2e747a282465311592f0fdb5863f823" exitCode=0 Jan 27 07:52:05 crc kubenswrapper[4787]: I0127 07:52:05.331778 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-fqb6r" event={"ID":"0ff88955-7cd9-4af4-9e0e-79614a1d2994","Type":"ContainerDied","Data":"f9782b2a339d2dba1498f28ed3f7e6c4c2e747a282465311592f0fdb5863f823"} Jan 27 07:52:05 crc kubenswrapper[4787]: I0127 07:52:05.345989 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7642m" event={"ID":"fa44405c-042c-485a-ab6c-912dcd377751","Type":"ContainerStarted","Data":"d46b5c512f7b14d97a53ce0355a7ceb08370d3e91ccef003b40386a9785df413"} Jan 27 07:52:05 crc kubenswrapper[4787]: I0127 07:52:05.347966 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:05Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:05 crc kubenswrapper[4787]: I0127 07:52:05.375601 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-rqjpz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e6f78168-0b0d-464d-b1c7-00bb9a69c0d1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e73805a73ffd2c2028fa682daaf20e295ecd2f2d7e24bb9b897883f18c3c514e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mo
untPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tz7b2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-rqjpz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:05Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:05 crc kubenswrapper[4787]: I0127 07:52:05.390106 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-fqb6r" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0ff88955-7cd9-4af4-9e0e-79614a1d2994\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5e7833b50fe96981aa0ed63aa03b7078def1c809e60092a901a8b7ebaee78ad8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5e7833b50fe96981aa0ed63aa03b7078def1c809e60092a901a8b7ebaee78ad8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:52:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a0b5a15ba402c52fc6f3cd5d882f597c90ea8d1ab4e836f0e0998a301126f5a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a0b5a15ba402c52fc6f3cd5d882f597c90ea8d1ab4e836f0e0998a301126f5a6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:52:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:52:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://82bb64425ded7611030cccfa7a439b14e545bf83bc48c762df144eddc7d08487\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://82bb64425ded7611030cccfa7a439b14e545bf83bc48c762df144eddc7d08487\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:52:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:52:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9782b2a339d2dba1498f28ed3f7e6c4c2e747a282465311592f0fdb5863f823\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f9782b2a339d2dba1498f28ed3f7e6c4c2e747a282465311592f0fdb5863f823\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:52:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:52:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disa
bled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-fqb6r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:05Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:05 crc kubenswrapper[4787]: I0127 07:52:05.400685 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:05 crc kubenswrapper[4787]: I0127 07:52:05.400730 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:05 crc kubenswrapper[4787]: I0127 07:52:05.400744 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:05 crc kubenswrapper[4787]: I0127 07:52:05.400764 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:05 crc kubenswrapper[4787]: I0127 07:52:05.400776 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:05Z","lastTransitionTime":"2026-01-27T07:52:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:52:05 crc kubenswrapper[4787]: I0127 07:52:05.410464 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7279787f-4f29-4eae-a213-b7ea5d579b93\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://486c1b541ff6adc6a3cdab348fbdd7fbcf1c202c1a474e5e5db37bbcfb38fe06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://47800c35810b19642ee37e41f20900bef93d843d1d7e2219547dde7bf88cc7dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://85684c09dd546fc9a972aa45da092b22d794a5588140a451becfbe962be82b76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\
":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0aa5c186d3ea5f41540e7450b9ee5a5bbfe8359762ab9aa219a4e4bb26fb0a94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9717409dfdeb0d26d9a1c28a651d1feddfd4aaa11a9bc7db7206fbf8c6400c22\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b0fb183cca1d2f6f6f5bd9c80107e85fe0f5f39ea985a036ba115836e45d7cc2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b0fb183cca1d2f6f6f5bd9c80107e85fe0f5f39ea985a036ba115836e45d7cc2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:51:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a9bb746c11b2a67e014815e16d6765a1c86a3d2c156cae83853881bdb988507c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a9bb746c11b2a67e014815e16d6765a1c86a3d2c156cae83853881bdb988507c\\\",\\\"exitCode\\\":0,\\\"finished
At\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://1fa78137ccc23e32f6eb838abf3991ca3d99b8349721f1419c892a7f8795f55f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1fa78137ccc23e32f6eb838abf3991ca3d99b8349721f1419c892a7f8795f55f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:51:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:35Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:05Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:05 crc kubenswrapper[4787]: I0127 07:52:05.424849 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7a18f3e6327858658093c392b8b7de18ec6a1085c08ae31ae2f2dc371555011a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:05Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:05 crc kubenswrapper[4787]: I0127 07:52:05.439727 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:05Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:05 crc kubenswrapper[4787]: I0127 07:52:05.461810 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7642m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fa44405c-042c-485a-ab6c-912dcd377751\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://199ad62dee1b7ba9f8af9b2f3fcd77db94e8ba7dbfe2d011610c47358c17126b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://199ad62dee1b7ba9f8af9b2f3fcd77db94e8ba7dbfe2d011610c47358c17126b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:52:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:59Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-7642m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:05Z 
is after 2025-08-24T17:21:41Z" Jan 27 07:52:05 crc kubenswrapper[4787]: I0127 07:52:05.476397 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-q4fh5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f051e184-acac-47cf-9e04-9df648288715\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cf36b99a3389bdbbca8142539870671f6be61df22503df198f6255937e619950\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zvg7g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8829867d627dfed47b988a93a9ac3e381c0bf59794b65f8e1c1e821838477748\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zvg7g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:59Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-q4fh5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": 
tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:05Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:05 crc kubenswrapper[4787]: I0127 07:52:05.506002 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:05 crc kubenswrapper[4787]: I0127 07:52:05.506054 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:05 crc kubenswrapper[4787]: I0127 07:52:05.506069 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:05 crc kubenswrapper[4787]: I0127 07:52:05.506088 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:05 crc kubenswrapper[4787]: I0127 07:52:05.506100 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:05Z","lastTransitionTime":"2026-01-27T07:52:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:05 crc kubenswrapper[4787]: I0127 07:52:05.508829 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2974b5846d4b9e95a3eab849f160c5b33393ff6b1bf1275bc6476ec80807d632\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4426d84c4375c7c659c77049bbd6a720bf004fa5e7780b1e8739a2620c3667f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0
b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:05Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:05 crc kubenswrapper[4787]: I0127 07:52:05.549839 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1d5a64cdfd5f8aff93294f92aebc9ccf8fa2a4063e347fce2e35604d27651d9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:05Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:05 crc kubenswrapper[4787]: I0127 07:52:05.590244 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-fghc7" err="failed to patch 
status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6cac1bb0-6b6f-442f-9db4-a25d4f194bb1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://825d098d784c9e43aa1c5b9014dd290f1a23ddf0651dfd603c634682a8f05da2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c8zvj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:59Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-fghc7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:05Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:05 crc kubenswrapper[4787]: I0127 07:52:05.608641 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:05 crc kubenswrapper[4787]: I0127 07:52:05.608682 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:05 crc kubenswrapper[4787]: I0127 07:52:05.608693 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:05 crc kubenswrapper[4787]: I0127 07:52:05.608712 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:05 crc kubenswrapper[4787]: I0127 07:52:05.608727 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:05Z","lastTransitionTime":"2026-01-27T07:52:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns 
error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:05 crc kubenswrapper[4787]: I0127 07:52:05.629609 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-rqwfc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fe88a860-6bb6-40ef-8ed7-c16f06fad8d3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f0dc7c2423b05f4eb857a5a6d8a5006cea055965d20a85e84659b369f9659eeb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-scmbf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:52:02Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-rqwfc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:05Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:05 crc kubenswrapper[4787]: I0127 07:52:05.669302 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9bef4a57-904d-47b1-baa1-224c5c9f2b1a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://99c0fb5499a4bed619d003da60f83076e413fc5bda47a12d42770f567deeeec8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c114c2274358e777b240765cca81ea54a2e334002317a86595b7bfec95a11fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c470d3edd878f69c48f1457780557dd56300c0fe69342c479f1922dca856bceb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bba5d04b33d62090867842211184f53bc23cf78b90a4c6709792639b74d1cf0d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:35Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:05Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:05 crc kubenswrapper[4787]: I0127 07:52:05.713692 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:05 crc kubenswrapper[4787]: I0127 07:52:05.713726 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:05 crc kubenswrapper[4787]: I0127 07:52:05.713736 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:05 crc kubenswrapper[4787]: I0127 07:52:05.713755 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:05 crc kubenswrapper[4787]: I0127 07:52:05.713766 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:05Z","lastTransitionTime":"2026-01-27T07:52:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:52:05 crc kubenswrapper[4787]: I0127 07:52:05.714716 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"60eef58e-b2eb-43d9-a499-317083a89ca3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://35e90c0899d9ae9ea7cce3f5904955f1f3a80e147efbd766e9ed3b34014f40df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://53dd18ffb0f39abccd8edc121977448f4c491f6be9acee866dde8cbeb7c52ab5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://af5452f3980a8a4614721cb60fffffb3b1f3f5d8d82928249857ed2dd3fb5986\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartC
ount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://879d855210b326ad3cbb964024c0b48b7373519deae8b974b9f5864416c15ef3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://879d855210b326ad3cbb964024c0b48b7373519deae8b974b9f5864416c15ef3\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"message\\\":\\\"le observer\\\\nW0127 07:51:54.984485 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0127 07:51:54.984680 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0127 07:51:54.985504 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2155430209/tls.crt::/tmp/serving-cert-2155430209/tls.key\\\\\\\"\\\\nI0127 07:51:55.207010 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0127 07:51:55.214027 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0127 07:51:55.214056 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0127 07:51:55.214085 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0127 07:51:55.214092 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0127 07:51:55.221404 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0127 07:51:55.221608 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0127 07:51:55.221697 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0127 07:51:55.221768 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0127 07:51:55.221825 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0127 07:51:55.221877 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0127 07:51:55.221926 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0127 07:51:55.221414 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0127 07:51:55.222961 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-27T07:51:49Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f3424e1f77a68fac6a8ccd12e018ce28850817c99e72cef7edbff0941124c46b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c37afd8d2562157729aca3685d7abae2bb6b9e081c2b456706dde875fd762c07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c37afd8d2562157729aca3685d7abae2bb6b9e081c2b456706dde875fd762c07\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:51:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:35Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:05Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:05 crc kubenswrapper[4787]: I0127 07:52:05.751266 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:05Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:05 crc kubenswrapper[4787]: I0127 07:52:05.817299 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:05 crc kubenswrapper[4787]: I0127 07:52:05.817352 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:05 crc kubenswrapper[4787]: I0127 07:52:05.817366 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:05 crc kubenswrapper[4787]: I0127 07:52:05.817385 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:05 crc kubenswrapper[4787]: I0127 07:52:05.817402 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:05Z","lastTransitionTime":"2026-01-27T07:52:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:52:05 crc kubenswrapper[4787]: I0127 07:52:05.922350 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:05 crc kubenswrapper[4787]: I0127 07:52:05.922396 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:05 crc kubenswrapper[4787]: I0127 07:52:05.922419 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:05 crc kubenswrapper[4787]: I0127 07:52:05.922487 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:05 crc kubenswrapper[4787]: I0127 07:52:05.922505 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:05Z","lastTransitionTime":"2026-01-27T07:52:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:06 crc kubenswrapper[4787]: I0127 07:52:06.025242 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:06 crc kubenswrapper[4787]: I0127 07:52:06.025277 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:06 crc kubenswrapper[4787]: I0127 07:52:06.025287 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:06 crc kubenswrapper[4787]: I0127 07:52:06.025301 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:06 crc kubenswrapper[4787]: I0127 07:52:06.025311 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:06Z","lastTransitionTime":"2026-01-27T07:52:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:52:06 crc kubenswrapper[4787]: I0127 07:52:06.043632 4787 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-23 18:00:10.466147647 +0000 UTC Jan 27 07:52:06 crc kubenswrapper[4787]: I0127 07:52:06.148685 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:06 crc kubenswrapper[4787]: I0127 07:52:06.148721 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:06 crc kubenswrapper[4787]: I0127 07:52:06.148731 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:06 crc kubenswrapper[4787]: I0127 07:52:06.148746 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:06 crc kubenswrapper[4787]: I0127 07:52:06.148754 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:06Z","lastTransitionTime":"2026-01-27T07:52:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:06 crc kubenswrapper[4787]: I0127 07:52:06.251296 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:06 crc kubenswrapper[4787]: I0127 07:52:06.251331 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:06 crc kubenswrapper[4787]: I0127 07:52:06.251339 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:06 crc kubenswrapper[4787]: I0127 07:52:06.251352 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:06 crc kubenswrapper[4787]: I0127 07:52:06.251361 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:06Z","lastTransitionTime":"2026-01-27T07:52:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:52:06 crc kubenswrapper[4787]: I0127 07:52:06.353204 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:06 crc kubenswrapper[4787]: I0127 07:52:06.353257 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:06 crc kubenswrapper[4787]: I0127 07:52:06.353272 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:06 crc kubenswrapper[4787]: I0127 07:52:06.353292 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:06 crc kubenswrapper[4787]: I0127 07:52:06.353308 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:06Z","lastTransitionTime":"2026-01-27T07:52:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:06 crc kubenswrapper[4787]: I0127 07:52:06.353793 4787 generic.go:334] "Generic (PLEG): container finished" podID="0ff88955-7cd9-4af4-9e0e-79614a1d2994" containerID="c8924a3ef645447151d91c506d105c9760f0dce0c568e9bf37ea76108a6352b2" exitCode=0 Jan 27 07:52:06 crc kubenswrapper[4787]: I0127 07:52:06.353918 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-fqb6r" event={"ID":"0ff88955-7cd9-4af4-9e0e-79614a1d2994","Type":"ContainerDied","Data":"c8924a3ef645447151d91c506d105c9760f0dce0c568e9bf37ea76108a6352b2"} Jan 27 07:52:06 crc kubenswrapper[4787]: I0127 07:52:06.375504 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9bef4a57-904d-47b1-baa1-224c5c9f2b1a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://99c0fb5499a4bed619d003da60f83076e413fc5bda47a12d42770f567deeeec8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c114c2274358e777b240765cca81ea54a2e334002317a86595b7bfec95a11fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c470d3edd878f69c48f1457780557dd56300c0fe69342c479f1922dca856bceb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bba5d04b33d62090867842211184f53bc23cf78b90a4c6709792639b74d1cf0d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:35Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:06Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:06 crc kubenswrapper[4787]: I0127 07:52:06.395675 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"60eef58e-b2eb-43d9-a499-317083a89ca3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://35e90c0899d9ae9ea7cce3f5904955f1f3a80e147efbd766e9ed3b34014f40df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://53dd18ffb0f39abccd8edc121977448f4c491f6be9acee866dde8cbeb7c52ab5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://af5452f3980a8a4614721cb60fffffb3b1f3f5d8d82928249857ed2dd3fb5986\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://879d855210b326ad3cbb964024c0b48b7373519deae8b974b9f5864416c15ef3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://879d855210b326ad3cbb964024c0b48b7373519deae8b974b9f5864416c15ef3\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"message\\\":\\\"le observer\\\\nW0127 07:51:54.984485 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0127 07:51:54.984680 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0127 07:51:54.985504 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2155430209/tls.crt::/tmp/serving-cert-2155430209/tls.key\\\\\\\"\\\\nI0127 07:51:55.207010 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0127 07:51:55.214027 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0127 07:51:55.214056 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0127 07:51:55.214085 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0127 07:51:55.214092 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0127 07:51:55.221404 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0127 07:51:55.221608 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0127 07:51:55.221697 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0127 07:51:55.221768 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0127 07:51:55.221825 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0127 07:51:55.221877 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0127 07:51:55.221926 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0127 07:51:55.221414 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0127 07:51:55.222961 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-27T07:51:49Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f3424e1f77a68fac6a8ccd12e018ce28850817c99e72cef7edbff0941124c46b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c37afd8d2562157729aca3685d7abae2bb6b9e081c2b456706dde875fd762c07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c37afd8d2562157729aca3685d7abae2bb6b9e081c2b456706dde875fd762c07\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:51:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:35Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:06Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:06 crc kubenswrapper[4787]: I0127 07:52:06.413326 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:06Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:06 crc kubenswrapper[4787]: I0127 07:52:06.427394 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-fghc7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6cac1bb0-6b6f-442f-9db4-a25d4f194bb1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://825d098d784c9e43aa1c5b9014dd290f1a23ddf0651dfd603c634682a8f05da2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c8zvj\\\",\\\"readOnly\\\":true,\\\"recu
rsiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:59Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-fghc7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:06Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:06 crc kubenswrapper[4787]: I0127 07:52:06.440000 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-rqwfc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fe88a860-6bb6-40ef-8ed7-c16f06fad8d3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f0dc7c2423b05f4eb857a5a6d8a5006cea055965d20a85e84659b369f9659eeb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-scmbf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:52:02Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-rqwfc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:06Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:06 crc kubenswrapper[4787]: I0127 07:52:06.456245 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Jan 27 07:52:06 crc kubenswrapper[4787]: I0127 07:52:06.456290 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:06 crc kubenswrapper[4787]: I0127 07:52:06.456299 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:06 crc kubenswrapper[4787]: I0127 07:52:06.456313 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:06 crc kubenswrapper[4787]: I0127 07:52:06.456323 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:06Z","lastTransitionTime":"2026-01-27T07:52:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:06 crc kubenswrapper[4787]: I0127 07:52:06.459171 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7279787f-4f29-4eae-a213-b7ea5d579b93\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://486c1b541ff6adc6a3cdab348fbdd7fbcf1c202c1a474e5e5db37bbcfb38fe06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://47800c35810b19642ee37e41f20900bef93d843d1d7e2219547dde7bf88cc7dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"sta
rted\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://85684c09dd546fc9a972aa45da092b22d794a5588140a451becfbe962be82b76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0aa5c186d3ea5f41540e7450b9ee5a5bbfe8359762ab9aa219a4e4bb26fb0a94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9717409dfdeb0d26d9a1c28a651d1feddfd4aaa11a9bc7db7206fbf8c6400c22\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b0fb183cca1d2f6f6f5bd9c80107e85fe0f5f39ea985a036ba115836e45d7cc2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":
\\\"cri-o://b0fb183cca1d2f6f6f5bd9c80107e85fe0f5f39ea985a036ba115836e45d7cc2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:51:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a9bb746c11b2a67e014815e16d6765a1c86a3d2c156cae83853881bdb988507c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a9bb746c11b2a67e014815e16d6765a1c86a3d2c156cae83853881bdb988507c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://1fa78137ccc23e32f6eb838abf3991ca3d99b8349721f1419c892a7f8795f55f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1fa78137ccc23e32f6eb838abf3991ca3d99b8349721f1419c892a7f8795f55f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:51:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:35Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:06Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:06 crc kubenswrapper[4787]: I0127 07:52:06.473469 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7a18f3e6327858658093c392b8b7de18ec6a1085c08ae31ae2f2dc371555011a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:06Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:06 crc kubenswrapper[4787]: I0127 07:52:06.485670 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:06Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:06 crc kubenswrapper[4787]: I0127 07:52:06.506809 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:06Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:06 crc kubenswrapper[4787]: I0127 07:52:06.522937 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-rqjpz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e6f78168-0b0d-464d-b1c7-00bb9a69c0d1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e73805a73ffd2c2028fa682daaf20e295ecd2f2d7e24bb9b897883f18c3c514e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mo
untPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tz7b2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-rqjpz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:06Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:06 crc kubenswrapper[4787]: I0127 07:52:06.539196 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-fqb6r" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0ff88955-7cd9-4af4-9e0e-79614a1d2994\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5e7833b50fe96981aa0ed63aa03b7078def1c809e60092a901a8b7ebaee78ad8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5e7833b50fe96981aa0ed63aa03b7078def1c809e60092a901a8b7ebaee78ad8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:52:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a0b5a15ba402c52fc6f3cd5d882f597c90ea8d1ab4e836f0e0998a301126f5a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a0b5a15ba402c52fc6f3cd5d882f597c90ea8d1ab4e836f0e0998a301126f5a6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:52:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:52:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://82bb64425ded7611030cccfa7a439b14e545bf83bc48c762df144eddc7d08487\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://82bb64425ded7611030cccfa7a439b14e545bf83bc48c762df144eddc7d08487\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:52:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:52:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9782b2a339d2dba1498f28ed3f7e6c4c2e747a282465311592f0fdb5863f823\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f9782b2a339d2dba1498f28ed3f7e6c4c2e747a282465311592f0fdb5863f823\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:52:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:52:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c8924a3ef645447151d91c506d105c9760f0dce0c568e9bf37ea76108a6352b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c8924a3ef645447151d91c506d105c9760f0dce0c568e9bf37ea76108a6352b2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:52:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",
\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-fqb6r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:06Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:06 crc kubenswrapper[4787]: I0127 07:52:06.559867 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:06 crc kubenswrapper[4787]: I0127 07:52:06.559991 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:06 crc kubenswrapper[4787]: I0127 07:52:06.560010 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:06 crc kubenswrapper[4787]: I0127 07:52:06.560033 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:06 crc kubenswrapper[4787]: I0127 07:52:06.560046 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:06Z","lastTransitionTime":"2026-01-27T07:52:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:52:06 crc kubenswrapper[4787]: I0127 07:52:06.561230 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7642m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fa44405c-042c-485a-ab6c-912dcd377751\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":fa
lse,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\
\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://199ad62dee1b7ba9f8af9b2f3fcd77db94e8ba7dbfe2d01161
0c47358c17126b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://199ad62dee1b7ba9f8af9b2f3fcd77db94e8ba7dbfe2d011610c47358c17126b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:52:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:59Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-7642m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:06Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:06 crc kubenswrapper[4787]: I0127 07:52:06.577422 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2974b5846d4b9e95a3eab849f160c5b33393ff6b1bf1275bc6476ec80807d632\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4426d84c4375c7c659c77049bbd6a720bf004fa5e7780b1e8739a2620c3667f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d
1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:06Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:06 crc kubenswrapper[4787]: I0127 07:52:06.590176 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1d5a64cdfd5f8aff93294f92aebc9ccf8fa2a4063e347fce2e35604d27651d9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:06Z is after 2025-08-24T17:21:41Z" Jan 27 
07:52:06 crc kubenswrapper[4787]: I0127 07:52:06.600099 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-q4fh5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f051e184-acac-47cf-9e04-9df648288715\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cf36b99a3389bdbbca8142539870671f6be61df22503df198f6255937e619950\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zvg7g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8829867d627dfed47b988a93a9ac3e381c0bf59794b65f8e1c1e821838477748\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zvg7g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:59Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-q4fh5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: 
certificate has expired or is not yet valid: current time 2026-01-27T07:52:06Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:06 crc kubenswrapper[4787]: I0127 07:52:06.663833 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:06 crc kubenswrapper[4787]: I0127 07:52:06.664219 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:06 crc kubenswrapper[4787]: I0127 07:52:06.664229 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:06 crc kubenswrapper[4787]: I0127 07:52:06.664243 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:06 crc kubenswrapper[4787]: I0127 07:52:06.664253 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:06Z","lastTransitionTime":"2026-01-27T07:52:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:06 crc kubenswrapper[4787]: I0127 07:52:06.767685 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:06 crc kubenswrapper[4787]: I0127 07:52:06.767746 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:06 crc kubenswrapper[4787]: I0127 07:52:06.767768 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:06 crc kubenswrapper[4787]: I0127 07:52:06.767793 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:06 crc kubenswrapper[4787]: I0127 07:52:06.767811 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:06Z","lastTransitionTime":"2026-01-27T07:52:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:52:06 crc kubenswrapper[4787]: I0127 07:52:06.870851 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:06 crc kubenswrapper[4787]: I0127 07:52:06.870899 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:06 crc kubenswrapper[4787]: I0127 07:52:06.870910 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:06 crc kubenswrapper[4787]: I0127 07:52:06.870929 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:06 crc kubenswrapper[4787]: I0127 07:52:06.870948 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:06Z","lastTransitionTime":"2026-01-27T07:52:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:06 crc kubenswrapper[4787]: I0127 07:52:06.974531 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:06 crc kubenswrapper[4787]: I0127 07:52:06.974593 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:06 crc kubenswrapper[4787]: I0127 07:52:06.974604 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:06 crc kubenswrapper[4787]: I0127 07:52:06.974623 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:06 crc kubenswrapper[4787]: I0127 07:52:06.974634 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:06Z","lastTransitionTime":"2026-01-27T07:52:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:07 crc kubenswrapper[4787]: I0127 07:52:07.044824 4787 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-14 19:42:28.099317954 +0000 UTC Jan 27 07:52:07 crc kubenswrapper[4787]: I0127 07:52:07.075801 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 27 07:52:07 crc kubenswrapper[4787]: I0127 07:52:07.075874 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 27 07:52:07 crc kubenswrapper[4787]: I0127 07:52:07.075821 4787 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 27 07:52:07 crc kubenswrapper[4787]: E0127 07:52:07.076050 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 27 07:52:07 crc kubenswrapper[4787]: E0127 07:52:07.076243 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 27 07:52:07 crc kubenswrapper[4787]: E0127 07:52:07.076393 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 27 07:52:07 crc kubenswrapper[4787]: I0127 07:52:07.078133 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:07 crc kubenswrapper[4787]: I0127 07:52:07.078174 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:07 crc kubenswrapper[4787]: I0127 07:52:07.078185 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:07 crc kubenswrapper[4787]: I0127 07:52:07.078204 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:07 crc kubenswrapper[4787]: I0127 07:52:07.078217 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:07Z","lastTransitionTime":"2026-01-27T07:52:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:52:07 crc kubenswrapper[4787]: I0127 07:52:07.189089 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:07 crc kubenswrapper[4787]: I0127 07:52:07.189163 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:07 crc kubenswrapper[4787]: I0127 07:52:07.189180 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:07 crc kubenswrapper[4787]: I0127 07:52:07.189200 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:07 crc kubenswrapper[4787]: I0127 07:52:07.189211 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:07Z","lastTransitionTime":"2026-01-27T07:52:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:07 crc kubenswrapper[4787]: I0127 07:52:07.292436 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:07 crc kubenswrapper[4787]: I0127 07:52:07.292483 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:07 crc kubenswrapper[4787]: I0127 07:52:07.292494 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:07 crc kubenswrapper[4787]: I0127 07:52:07.292509 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:07 crc kubenswrapper[4787]: I0127 07:52:07.292520 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:07Z","lastTransitionTime":"2026-01-27T07:52:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:52:07 crc kubenswrapper[4787]: I0127 07:52:07.362123 4787 generic.go:334] "Generic (PLEG): container finished" podID="0ff88955-7cd9-4af4-9e0e-79614a1d2994" containerID="0f0da2ec606517cc9e0c2ed1d549c947db2eecb603f9bc9ba8bddbee0aad4710" exitCode=0 Jan 27 07:52:07 crc kubenswrapper[4787]: I0127 07:52:07.362201 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-fqb6r" event={"ID":"0ff88955-7cd9-4af4-9e0e-79614a1d2994","Type":"ContainerDied","Data":"0f0da2ec606517cc9e0c2ed1d549c947db2eecb603f9bc9ba8bddbee0aad4710"} Jan 27 07:52:07 crc kubenswrapper[4787]: I0127 07:52:07.370352 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7642m" event={"ID":"fa44405c-042c-485a-ab6c-912dcd377751","Type":"ContainerStarted","Data":"6fc438285725e8717b0f840dbe429df899942e4ffb4df80a73d92b32cc4635b5"} Jan 27 07:52:07 crc kubenswrapper[4787]: I0127 07:52:07.370856 4787 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-7642m" Jan 27 07:52:07 crc kubenswrapper[4787]: I0127 07:52:07.371003 4787 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-7642m" Jan 27 07:52:07 crc kubenswrapper[4787]: I0127 07:52:07.384876 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-rqjpz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e6f78168-0b0d-464d-b1c7-00bb9a69c0d1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e73805a73ffd2c2028fa682daaf20e295ecd2f2d7e24bb9b897883f18c3c514e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent
\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tz7b2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-rqjpz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:07Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:07 crc kubenswrapper[4787]: I0127 07:52:07.395525 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:07 crc kubenswrapper[4787]: I0127 07:52:07.395573 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:07 crc kubenswrapper[4787]: I0127 07:52:07.395583 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:07 crc kubenswrapper[4787]: I0127 07:52:07.395596 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:07 crc kubenswrapper[4787]: I0127 07:52:07.395607 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:07Z","lastTransitionTime":"2026-01-27T07:52:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:52:07 crc kubenswrapper[4787]: I0127 07:52:07.402456 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-fqb6r" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0ff88955-7cd9-4af4-9e0e-79614a1d2994\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5e7833b50fe96981aa0ed63aa03b7078def1c809e60092a901a8b7ebaee78ad8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5e7833b50fe96981aa0ed63aa03b7078def1c809e60092a901a8b7ebaee78ad8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:52:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a0b5a15ba402c52fc6
f3cd5d882f597c90ea8d1ab4e836f0e0998a301126f5a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a0b5a15ba402c52fc6f3cd5d882f597c90ea8d1ab4e836f0e0998a301126f5a6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:52:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:52:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://82bb64425ded7611030cccfa7a439b14e545bf83bc48c762df144eddc7d08487\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://82bb64425ded7611030cccfa7a439b14e545bf83bc48c762df144eddc7d08487\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:52:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:52:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9782b2a339d2dba1498f28ed3f7e6c4c2e747a282465311592f0fdb5863f823\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f9782b2a339d2dba1498f28ed3f7e6c4c2e747a282465311592f0fdb5863f823\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:52:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:52:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\
\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c8924a3ef645447151d91c506d105c9760f0dce0c568e9bf37ea76108a6352b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c8924a3ef645447151d91c506d105c9760f0dce0c568e9bf37ea76108a6352b2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:52:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f0da2ec606517cc9e0c2ed1d549c947db2eecb603f9bc9ba8bddbee0aad4710\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0f0da2ec606517cc9e0c2ed1d549c947db2eecb603f9bc9ba8bddbee0aad4710\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:52:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:52:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-fqb6r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:07Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:07 crc kubenswrapper[4787]: I0127 07:52:07.423397 4787 status_manager.go:875] "Failed to update status for 
pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7279787f-4f29-4eae-a213-b7ea5d579b93\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://486c1b541ff6adc6a3cdab348fbdd7fbcf1c202c1a474e5e5db37bbcfb38fe06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://47800c35810b19642ee37e41f20900bef93d843d1d7e2219547dde7bf88cc7dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://85684c09dd546fc9a972aa45da092b22d794a5588140a451becfbe962be82b76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"cont
ainerID\\\":\\\"cri-o://0aa5c186d3ea5f41540e7450b9ee5a5bbfe8359762ab9aa219a4e4bb26fb0a94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9717409dfdeb0d26d9a1c28a651d1feddfd4aaa11a9bc7db7206fbf8c6400c22\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b0fb183cca1d2f6f6f5bd9c80107e85fe0f5f39ea985a036ba115836e45d7cc2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b0fb183cca1d2f6f6f5bd9c80107e85fe0f5f39ea985a036ba115836e45d7cc2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:51:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a9bb746c11b2a67e014815e16d6765a1c86a3d2c156cae83853881bdb988507c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a9bb746c11b2a67e014815e16d6765a1c86a3d2c156cae83853881bdb988507c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://1fa78
137ccc23e32f6eb838abf3991ca3d99b8349721f1419c892a7f8795f55f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1fa78137ccc23e32f6eb838abf3991ca3d99b8349721f1419c892a7f8795f55f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:51:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:35Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:07Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:07 crc kubenswrapper[4787]: I0127 07:52:07.437817 4787 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-7642m" Jan 27 07:52:07 crc kubenswrapper[4787]: I0127 07:52:07.437916 4787 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-7642m" Jan 27 07:52:07 crc kubenswrapper[4787]: I0127 07:52:07.439204 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7a18f3e6327858658093c392b8b7de18ec6a1085c08ae31ae2f2dc371555011a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:07Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:07 crc kubenswrapper[4787]: I0127 07:52:07.451850 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:07Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:07 crc kubenswrapper[4787]: I0127 07:52:07.465102 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:07Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:07 crc kubenswrapper[4787]: I0127 07:52:07.488334 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7642m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fa44405c-042c-485a-ab6c-912dcd377751\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://199ad62dee1b7ba9f8af9b2f3fcd77db94e8ba7dbfe2d011610c47358c17126b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://199ad62dee1b7ba9f8af9b2f3fcd77db94e8ba7dbfe2d011610c47358c17126b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:52:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:59Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-7642m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:07Z 
is after 2025-08-24T17:21:41Z" Jan 27 07:52:07 crc kubenswrapper[4787]: I0127 07:52:07.498578 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:07 crc kubenswrapper[4787]: I0127 07:52:07.498639 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:07 crc kubenswrapper[4787]: I0127 07:52:07.498655 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:07 crc kubenswrapper[4787]: I0127 07:52:07.498681 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:07 crc kubenswrapper[4787]: I0127 07:52:07.498695 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:07Z","lastTransitionTime":"2026-01-27T07:52:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:07 crc kubenswrapper[4787]: I0127 07:52:07.504408 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2974b5846d4b9e95a3eab849f160c5b33393ff6b1bf1275bc6476ec80807d632\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4426d84c4375c7c659c77049bbd6a720bf004fa5e7780b1e8739a2620c3667f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"
started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:07Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:07 crc kubenswrapper[4787]: I0127 07:52:07.521003 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1d5a64cdfd5f8aff93294f92aebc9ccf8fa2a4063e347fce2e35604d27651d9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:07Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:07 crc kubenswrapper[4787]: I0127 07:52:07.546284 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-q4fh5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f051e184-acac-47cf-9e04-9df648288715\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cf36b99a3389bdbbca8142539870671f6be61df22503df198f6255937e619950\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zvg7g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8829867d627dfed47b988a93a9ac3e381c0bf59794b65f8e1c1e821838477748\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zvg7g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:59Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-q4fh5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:07Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:07 crc kubenswrapper[4787]: I0127 07:52:07.563773 4787 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-image-registry/node-ca-rqwfc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fe88a860-6bb6-40ef-8ed7-c16f06fad8d3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f0dc7c2423b05f4eb857a5a6d8a5006cea055965d20a85e84659b369f9659eeb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-scmbf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:52:02Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-rqwfc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:07Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:07 crc kubenswrapper[4787]: I0127 07:52:07.580889 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9bef4a57-904d-47b1-baa1-224c5c9f2b1a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://99c0fb5499a4bed619d003da60f83076e413fc5bda47a12d42770f567deeeec8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c114c2274358e777b240765cca81ea54a2e334002317a86595b7bfec95a11fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c470d3edd878f69c48f1457780557dd56300c0fe69342c479f1922dca856bceb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bba5d04b33d62090867842211184f53bc23cf78b90a4c6709792639b74d1cf0d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:35Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:07Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:07 crc kubenswrapper[4787]: I0127 07:52:07.594132 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"60eef58e-b2eb-43d9-a499-317083a89ca3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://35e90c0899d9ae9ea7cce3f5904955f1f3a80e147efbd766e9ed3b34014f40df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://53dd18ffb0f39abccd8edc121977448f4c491f6be9acee866dde8cbeb7c52ab5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://af5452f3980a8a4614721cb60fffffb3b1f3f5d8d82928249857ed2dd3fb5986\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://879d855210b326ad3cbb964024c0b48b7373519deae8b974b9f5864416c15ef3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://879d855210b326ad3cbb964024c0b48b7373519deae8b974b9f5864416c15ef3\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"message\\\":\\\"le observer\\\\nW0127 07:51:54.984485 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0127 07:51:54.984680 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0127 07:51:54.985504 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2155430209/tls.crt::/tmp/serving-cert-2155430209/tls.key\\\\\\\"\\\\nI0127 07:51:55.207010 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0127 07:51:55.214027 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0127 07:51:55.214056 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0127 07:51:55.214085 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0127 07:51:55.214092 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0127 07:51:55.221404 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0127 07:51:55.221608 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0127 07:51:55.221697 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0127 07:51:55.221768 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0127 07:51:55.221825 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0127 07:51:55.221877 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0127 07:51:55.221926 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0127 07:51:55.221414 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0127 07:51:55.222961 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-27T07:51:49Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f3424e1f77a68fac6a8ccd12e018ce28850817c99e72cef7edbff0941124c46b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c37afd8d2562157729aca3685d7abae2bb6b9e081c2b456706dde875fd762c07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c37afd8d2562157729aca3685d7abae2bb6b9e081c2b456706dde875fd762c07\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:51:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:35Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:07Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:07 crc kubenswrapper[4787]: I0127 07:52:07.602951 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:07 crc kubenswrapper[4787]: I0127 07:52:07.602998 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:07 crc kubenswrapper[4787]: I0127 07:52:07.603010 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:07 crc kubenswrapper[4787]: I0127 07:52:07.603031 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:07 crc kubenswrapper[4787]: I0127 07:52:07.603044 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:07Z","lastTransitionTime":"2026-01-27T07:52:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:07 crc kubenswrapper[4787]: I0127 07:52:07.608365 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:07Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:07 crc kubenswrapper[4787]: I0127 07:52:07.619841 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-fghc7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6cac1bb0-6b6f-442f-9db4-a25d4f194bb1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://825d098d784c9e43aa1c5b9014dd290f1a23ddf0651dfd603c634682a8f05da2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c8zvj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:59Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-fghc7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:07Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:07 crc kubenswrapper[4787]: I0127 07:52:07.633570 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-fqb6r" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0ff88955-7cd9-4af4-9e0e-79614a1d2994\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5e7833b50fe96981aa0ed63aa03b7078def1c809e60092a901a8b7ebaee78ad8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5e7833b50fe96981aa0ed63aa03b7078def1c809e60092a901a8b7ebaee78ad8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:52:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a0b5a15ba402c52fc6f3cd5d882f597c90ea8d1ab4e836f0e0998a301126f5a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a0b5a15ba402c52fc6f3cd5d882f597c90ea8d1ab4e836f0e0998a301126f5a6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:52:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:52:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOn
ly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://82bb64425ded7611030cccfa7a439b14e545bf83bc48c762df144eddc7d08487\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://82bb64425ded7611030cccfa7a439b14e545bf83bc48c762df144eddc7d08487\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:52:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:52:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9782b2a339d2dba1498f28ed3f7e6c4c2e747a282465311592f0fdb5863f823\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f9782b2a339d2dba1498f28ed3f7e6c4c2e747a282465311592f0fdb5863f823\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:52:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:52:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c8924a3ef645447151d91c506d105c9760f0dce0c568e9bf37ea76108a6352b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c89
24a3ef645447151d91c506d105c9760f0dce0c568e9bf37ea76108a6352b2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:52:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f0da2ec606517cc9e0c2ed1d549c947db2eecb603f9bc9ba8bddbee0aad4710\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0f0da2ec606517cc9e0c2ed1d549c947db2eecb603f9bc9ba8bddbee0aad4710\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:52:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:52:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-fqb6r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:07Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:07 crc kubenswrapper[4787]: I0127 07:52:07.660071 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7279787f-4f29-4eae-a213-b7ea5d579b93\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://486c1b541ff6adc6a3cdab348fbdd7fbcf1c202c1a474e5e5db37bbcfb38fe06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://47800c35810b19642ee37e41f20900bef93d843d1d7e2219547dde7bf88cc7dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://85684c09dd546fc9a972aa45da092b22d794a5588140a451becfbe962be82b76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0aa5c186d3ea5f41540e7450b9ee5a5bbfe8359
762ab9aa219a4e4bb26fb0a94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9717409dfdeb0d26d9a1c28a651d1feddfd4aaa11a9bc7db7206fbf8c6400c22\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b0fb183cca1d2f6f6f5bd9c80107e85fe0f5f39ea985a036ba115836e45d7cc2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b0fb183cca1d2f6f6f5bd9c80107e85fe0f5f39ea985a036ba115836e45d7cc2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:51:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a9bb746c11b2a67e014815e16d6765a1c86a3d2c156cae83853881bdb988507c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a9bb746c11b2a67e014815e16d6765a1c86a3d2c156cae83853881bdb988507c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://1fa78137ccc23e32f6eb838abf3991ca3d99b8349721f1419c892a7f8795f55f\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1fa78137ccc23e32f6eb838abf3991ca3d99b8349721f1419c892a7f8795f55f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:51:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:35Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:07Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:07 crc kubenswrapper[4787]: I0127 07:52:07.678065 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7a18f3e6327858658093c392b8b7de18ec6a1085c08ae31ae2f2dc371555011a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:07Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:07 crc kubenswrapper[4787]: I0127 07:52:07.695979 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:07Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:07 crc kubenswrapper[4787]: I0127 07:52:07.706034 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:07 crc kubenswrapper[4787]: I0127 07:52:07.706089 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:07 crc kubenswrapper[4787]: I0127 07:52:07.706101 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:07 crc kubenswrapper[4787]: I0127 07:52:07.706124 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:07 crc kubenswrapper[4787]: I0127 07:52:07.706137 4787 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:07Z","lastTransitionTime":"2026-01-27T07:52:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:07 crc kubenswrapper[4787]: I0127 07:52:07.712722 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:07Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:07 crc kubenswrapper[4787]: I0127 07:52:07.734390 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-rqjpz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e6f78168-0b0d-464d-b1c7-00bb9a69c0d1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e73805a73ffd2c2028fa682daaf20e295ecd2f2d7e24bb9b897883f18c3c514e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mo
untPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tz7b2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-rqjpz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:07Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:07 crc kubenswrapper[4787]: I0127 07:52:07.762904 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7642m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fa44405c-042c-485a-ab6c-912dcd377751\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b5f54aa358ddf7d9fe76fd45c4e3332ba2ef1fa4da77c6f38ae29209c4339a80\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2abb29a77d0081b70495701e68f0a5a9b80656eab59ae8de10fab1450a1df49f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://108cdc389c2cbb9baeb538856e70ea1ca2bce5968eb4097700b3e71e5322258d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0bcc1a1d9f6e5bd0d8cf9b36441460debd839f92291531175eb05db5070136e8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fbc588f712cb0167029fc80f924f52841fb904738bbcf774c53ca15a2642ef9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc1a27dd4120a4eafd6bfbd908f9fdbc80e9b006afafb509399f2b657b129b2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6fc438285725e8717b0f840dbe429df899942e4f
fb4df80a73d92b32cc4635b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d46b5c512f7b14d97a53ce0355a7ceb08370d3e91ccef003b40386a9785df413\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccoun
t\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://199ad62dee1b7ba9f8af9b2f3fcd77db94e8ba7dbfe2d011610c47358c17126b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://199ad62dee1b7ba9f8af9b2f3fcd77db94e8ba7dbfe2d011610c47358c17126b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:52:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:59Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-7642m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:07Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:07 crc kubenswrapper[4787]: I0127 07:52:07.783194 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2974b5846d4b9e95a3eab849f160c5b33393ff6b1bf1275bc6476ec80807d632\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4426d84c4375c7c659c77049bbd6a720bf004fa5e7780b1e8739a2620c3667f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:07Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:07 crc kubenswrapper[4787]: I0127 07:52:07.795521 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1d5a64cdfd5f8aff93294f92aebc9ccf8fa2a4063e347fce2e35604d27651d9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:07Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:07 crc kubenswrapper[4787]: I0127 07:52:07.805944 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-q4fh5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f051e184-acac-47cf-9e04-9df648288715\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cf36b99a3389bdbbca8142539870671f6be61df22503df198f6255937e619950\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zvg7g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8829867d627dfed47b988a93a9ac3e381c0bf59794b65f8e1c1e821838477748\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zvg7g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:59Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-q4fh5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:07Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:07 crc kubenswrapper[4787]: I0127 07:52:07.809474 4787 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:07 crc kubenswrapper[4787]: I0127 07:52:07.809512 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:07 crc kubenswrapper[4787]: I0127 07:52:07.809540 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:07 crc kubenswrapper[4787]: I0127 07:52:07.809573 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:07 crc kubenswrapper[4787]: I0127 07:52:07.809585 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:07Z","lastTransitionTime":"2026-01-27T07:52:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:07 crc kubenswrapper[4787]: I0127 07:52:07.818141 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9bef4a57-904d-47b1-baa1-224c5c9f2b1a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://99c0fb5499a4bed619d003da60f83076e413fc5bda47a12d42770f567deeeec8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c114c2274358e777b240765cca81ea54a2e334002317a86595b7bfec95a11fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\
\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c470d3edd878f69c48f1457780557dd56300c0fe69342c479f1922dca856bceb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bba5d04b33d62090867842211184f53bc23cf78b90a4c6709792639b74d1cf0d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:35Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:07Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:07 crc kubenswrapper[4787]: I0127 07:52:07.833009 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"60eef58e-b2eb-43d9-a499-317083a89ca3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://35e90c0899d9ae9ea7cce3f5904955f1f3a80e147efbd766e9ed3b34014f40df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://53dd18ffb0f39abccd8edc121977448f4c491f6be9acee866dde8cbeb7c52ab5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://af5452f3980a8a4614721cb60fffffb3b1f3f5d8d82928249857ed2dd3fb5986\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://879d855210b326ad3cbb964024c0b48b7373519deae8b974b9f5864416c15ef3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\
\":\\\"cri-o://879d855210b326ad3cbb964024c0b48b7373519deae8b974b9f5864416c15ef3\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"message\\\":\\\"le observer\\\\nW0127 07:51:54.984485 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0127 07:51:54.984680 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0127 07:51:54.985504 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2155430209/tls.crt::/tmp/serving-cert-2155430209/tls.key\\\\\\\"\\\\nI0127 07:51:55.207010 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0127 07:51:55.214027 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0127 07:51:55.214056 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0127 07:51:55.214085 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0127 07:51:55.214092 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0127 07:51:55.221404 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0127 07:51:55.221608 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0127 07:51:55.221697 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0127 07:51:55.221768 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0127 07:51:55.221825 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0127 07:51:55.221877 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0127 07:51:55.221926 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0127 07:51:55.221414 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0127 07:51:55.222961 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-27T07:51:49Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f3424e1f77a68fac6a8ccd12e018ce28850817c99e72cef7edbff0941124c46b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c37afd8d2562157729aca3685d7abae2bb6b9e081c2b456706dde875fd762c07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c37afd8d2562157729aca3685d7abae2bb6b9e081c2b456706dde875fd762c07\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:51:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:35Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:07Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:07 crc kubenswrapper[4787]: I0127 07:52:07.849091 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:07Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:07 crc kubenswrapper[4787]: I0127 07:52:07.861491 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-fghc7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6cac1bb0-6b6f-442f-9db4-a25d4f194bb1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://825d098d784c9e43aa1c5b9014dd290f1a23ddf0651dfd603c634682a8f05da2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c8zvj\\\",\\\"readOnly\\\":true,\\\"recu
rsiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:59Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-fghc7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:07Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:07 crc kubenswrapper[4787]: I0127 07:52:07.872409 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-rqwfc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fe88a860-6bb6-40ef-8ed7-c16f06fad8d3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f0dc7c2423b05f4eb857a5a6d8a5006cea055965d20a85e84659b369f9659eeb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-scmbf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:52:02Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-rqwfc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:07Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:07 crc kubenswrapper[4787]: I0127 07:52:07.913147 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Jan 27 07:52:07 crc kubenswrapper[4787]: I0127 07:52:07.913201 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:07 crc kubenswrapper[4787]: I0127 07:52:07.913210 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:07 crc kubenswrapper[4787]: I0127 07:52:07.913226 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:07 crc kubenswrapper[4787]: I0127 07:52:07.913238 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:07Z","lastTransitionTime":"2026-01-27T07:52:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:08 crc kubenswrapper[4787]: I0127 07:52:08.017257 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:08 crc kubenswrapper[4787]: I0127 07:52:08.017303 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:08 crc kubenswrapper[4787]: I0127 07:52:08.017312 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:08 crc kubenswrapper[4787]: I0127 07:52:08.017326 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:08 crc kubenswrapper[4787]: I0127 07:52:08.017336 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:08Z","lastTransitionTime":"2026-01-27T07:52:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:52:08 crc kubenswrapper[4787]: I0127 07:52:08.045894 4787 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-26 07:23:35.4983036 +0000 UTC Jan 27 07:52:08 crc kubenswrapper[4787]: I0127 07:52:08.120534 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:08 crc kubenswrapper[4787]: I0127 07:52:08.120620 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:08 crc kubenswrapper[4787]: I0127 07:52:08.120632 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:08 crc kubenswrapper[4787]: I0127 07:52:08.120653 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:08 crc kubenswrapper[4787]: I0127 07:52:08.120669 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:08Z","lastTransitionTime":"2026-01-27T07:52:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:08 crc kubenswrapper[4787]: I0127 07:52:08.223360 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:08 crc kubenswrapper[4787]: I0127 07:52:08.223409 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:08 crc kubenswrapper[4787]: I0127 07:52:08.223422 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:08 crc kubenswrapper[4787]: I0127 07:52:08.223441 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:08 crc kubenswrapper[4787]: I0127 07:52:08.223453 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:08Z","lastTransitionTime":"2026-01-27T07:52:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:52:08 crc kubenswrapper[4787]: I0127 07:52:08.326426 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:08 crc kubenswrapper[4787]: I0127 07:52:08.326487 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:08 crc kubenswrapper[4787]: I0127 07:52:08.326499 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:08 crc kubenswrapper[4787]: I0127 07:52:08.326518 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:08 crc kubenswrapper[4787]: I0127 07:52:08.326530 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:08Z","lastTransitionTime":"2026-01-27T07:52:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:08 crc kubenswrapper[4787]: I0127 07:52:08.378411 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-fqb6r" event={"ID":"0ff88955-7cd9-4af4-9e0e-79614a1d2994","Type":"ContainerStarted","Data":"47c65fabdecb2c24a7eeabdff7a89339f711ff64e9cb6516900be57a2753f89b"} Jan 27 07:52:08 crc kubenswrapper[4787]: I0127 07:52:08.378529 4787 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Jan 27 07:52:08 crc kubenswrapper[4787]: I0127 07:52:08.392657 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7a18f3e6327858658093c392b8b7de18ec6a1085c08ae31ae2f2dc371555011a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:08Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:08 crc kubenswrapper[4787]: I0127 07:52:08.406723 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:08Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:08 crc kubenswrapper[4787]: I0127 07:52:08.419816 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:08Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:08 crc kubenswrapper[4787]: I0127 07:52:08.429700 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:08 crc kubenswrapper[4787]: I0127 07:52:08.429831 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:08 crc kubenswrapper[4787]: I0127 07:52:08.429862 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:08 crc kubenswrapper[4787]: I0127 07:52:08.429890 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:08 crc kubenswrapper[4787]: I0127 07:52:08.429910 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:08Z","lastTransitionTime":"2026-01-27T07:52:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:52:08 crc kubenswrapper[4787]: I0127 07:52:08.436868 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-rqjpz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e6f78168-0b0d-464d-b1c7-00bb9a69c0d1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e73805a73ffd2c2028fa682daaf20e295ecd2f2d7e24bb9b897883f18c3c514e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tz7b2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-rqjpz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:08Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:08 crc kubenswrapper[4787]: I0127 07:52:08.455033 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-fqb6r" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0ff88955-7cd9-4af4-9e0e-79614a1d2994\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://47c65fabdecb2c24a7eeabdff7a89339f711ff64e9cb6516900be57a2753f89b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5e7833b50fe96981aa0ed63aa03b7078def1c809e60092a901a8b7ebaee78ad8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5e7833b50fe96981aa0ed63aa03b7078def1c809e60092a901a8b7ebaee78ad8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:52:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cn
ibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a0b5a15ba402c52fc6f3cd5d882f597c90ea8d1ab4e836f0e0998a301126f5a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a0b5a15ba402c52fc6f3cd5d882f597c90ea8d1ab4e836f0e0998a301126f5a6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:52:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:52:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://82bb64425ded7611030cccfa7a439b14e545bf83bc48c762df144eddc7d08487\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://82bb64425ded7611030cccfa7a439b14e545bf83bc48c762df144eddc7d08487\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:52:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:52:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9782b2a339d2dba1498f28ed3f7e6c4c2e747a282465311592f0fdb5863f823\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":t
rue,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f9782b2a339d2dba1498f28ed3f7e6c4c2e747a282465311592f0fdb5863f823\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:52:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:52:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c8924a3ef645447151d91c506d105c9760f0dce0c568e9bf37ea76108a6352b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c8924a3ef645447151d91c506d105c9760f0dce0c568e9bf37ea76108a6352b2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:52:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f0da2ec606517cc9e0c2ed1d549c947db2eecb603f9bc9ba8bddbee0aad4710\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0f0da2ec606517cc9e0c2ed1d549c947db2eecb603f9bc9ba8bddbee0aad4710\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:52:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:52:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-fqb6r\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:08Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:08 crc kubenswrapper[4787]: I0127 07:52:08.477061 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7279787f-4f29-4eae-a213-b7ea5d579b93\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://486c1b541ff6adc6a3cdab348fbdd7fbcf1c202c1a474e5e5db37bbcfb38fe06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://47800c35810b19642ee37e41f20900bef93d843d1d7e2219547dde7bf88cc7dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://85684c09dd546fc9a972aa45da092b22d794a5588140a451becfbe962be82b76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc
2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0aa5c186d3ea5f41540e7450b9ee5a5bbfe8359762ab9aa219a4e4bb26fb0a94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9717409dfdeb0d26d9a1c28a651d1feddfd4aaa11a9bc7db7206fbf8c6400c22\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b0fb183cca1d2f6f6f5bd9c80107e85fe0f5f39ea985a036ba115836e45d7cc2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b0fb183cca1d2f6f6f5bd9c80107e85fe0f5f39ea985a036ba115836e45d7cc2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:51:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a9bb746c11b2a67e014815e16d6765a1c86a3d2c156cae83853881bdb988507c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\
\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a9bb746c11b2a67e014815e16d6765a1c86a3d2c156cae83853881bdb988507c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://1fa78137ccc23e32f6eb838abf3991ca3d99b8349721f1419c892a7f8795f55f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1fa78137ccc23e32f6eb838abf3991ca3d99b8349721f1419c892a7f8795f55f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:51:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:35Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:08Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:08 crc kubenswrapper[4787]: I0127 07:52:08.496699 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7642m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fa44405c-042c-485a-ab6c-912dcd377751\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b5f54aa358ddf7d9fe76fd45c4e3332ba2ef1fa4da77c6f38ae29209c4339a80\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2abb29a77d0081b70495701e68f0a5a9b80656eab59ae8de10fab1450a1df49f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://108cdc389c2cbb9baeb538856e70ea1ca2bce5968eb4097700b3e71e5322258d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0bcc1a1d9f6e5bd0d8cf9b36441460debd839f92291531175eb05db5070136e8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fbc588f712cb0167029fc80f924f52841fb904738bbcf774c53ca15a2642ef9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc1a27dd4120a4eafd6bfbd908f9fdbc80e9b006afafb509399f2b657b129b2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6fc438285725e8717b0f840dbe429df899942e4f
fb4df80a73d92b32cc4635b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d46b5c512f7b14d97a53ce0355a7ceb08370d3e91ccef003b40386a9785df413\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccoun
t\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://199ad62dee1b7ba9f8af9b2f3fcd77db94e8ba7dbfe2d011610c47358c17126b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://199ad62dee1b7ba9f8af9b2f3fcd77db94e8ba7dbfe2d011610c47358c17126b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:52:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:59Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-7642m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:08Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:08 crc kubenswrapper[4787]: I0127 07:52:08.517886 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2974b5846d4b9e95a3eab849f160c5b33393ff6b1bf1275bc6476ec80807d632\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4426d84c4375c7c659c77049bbd6a720bf004fa5e7780b1e8739a2620c3667f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:08Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:08 crc kubenswrapper[4787]: I0127 07:52:08.532258 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:08 crc kubenswrapper[4787]: I0127 07:52:08.532307 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:08 crc kubenswrapper[4787]: I0127 07:52:08.532321 4787 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Jan 27 07:52:08 crc kubenswrapper[4787]: I0127 07:52:08.532343 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:08 crc kubenswrapper[4787]: I0127 07:52:08.532358 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:08Z","lastTransitionTime":"2026-01-27T07:52:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:08 crc kubenswrapper[4787]: I0127 07:52:08.534462 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1d5a64cdfd5f8aff93294f92aebc9ccf8fa2a4063e347fce2e35604d27651d9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:08Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:08 crc kubenswrapper[4787]: I0127 07:52:08.548507 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-q4fh5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f051e184-acac-47cf-9e04-9df648288715\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cf36b99a3389bdbbca8142539870671f6be61df22503df198f6255937e619950\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zvg7g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8829867d627dfed47b988a93a9ac3e381c0bf59794b65f8e1c1e821838477748\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zvg7g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:59Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-q4fh5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:08Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:08 crc kubenswrapper[4787]: I0127 07:52:08.567010 4787 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"60eef58e-b2eb-43d9-a499-317083a89ca3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://35e90c0899d9ae9ea7cce3f5904955f1f3a80e147efbd766e9ed3b34014f40df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://53dd18ffb0f39abccd8edc121977448f4c491f6be9acee866dde8cbeb7c52ab5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://af5452f3980a8a4614721cb60fffffb3b1f3f5d8d82928249857ed2dd3fb5986\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\
"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://879d855210b326ad3cbb964024c0b48b7373519deae8b974b9f5864416c15ef3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://879d855210b326ad3cbb964024c0b48b7373519deae8b974b9f5864416c15ef3\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"message\\\":\\\"le observer\\\\nW0127 07:51:54.984485 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0127 07:51:54.984680 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0127 07:51:54.985504 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2155430209/tls.crt::/tmp/serving-cert-2155430209/tls.key\\\\\\\"\\\\nI0127 07:51:55.207010 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0127 07:51:55.214027 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0127 07:51:55.214056 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0127 07:51:55.214085 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0127 07:51:55.214092 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0127 07:51:55.221404 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0127 07:51:55.221608 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0127 07:51:55.221697 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0127 07:51:55.221768 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0127 07:51:55.221825 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0127 07:51:55.221877 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0127 07:51:55.221926 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0127 07:51:55.221414 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0127 07:51:55.222961 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-27T07:51:49Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f3424e1f77a68fac6a8ccd12e018ce28850817c99e72cef7edbff0941124c46b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c37afd8d2562157729aca3685d7abae2bb6b9e081c2b456706dde875fd762c07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c37afd8d2562157729aca3685d7abae2bb6b9e081c2b456706dde875fd762c07\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:51:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:35Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:08Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:08 crc kubenswrapper[4787]: I0127 07:52:08.578606 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:08Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:08 crc kubenswrapper[4787]: I0127 07:52:08.589460 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-fghc7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6cac1bb0-6b6f-442f-9db4-a25d4f194bb1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://825d098d784c9e43aa1c5b9014dd290f1a23ddf0651dfd603c634682a8f05da2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c8zvj\\\",\\\"readOnly\\\":true,\\\"recu
rsiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:59Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-fghc7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:08Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:08 crc kubenswrapper[4787]: I0127 07:52:08.600818 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-rqwfc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fe88a860-6bb6-40ef-8ed7-c16f06fad8d3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f0dc7c2423b05f4eb857a5a6d8a5006cea055965d20a85e84659b369f9659eeb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-scmbf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:52:02Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-rqwfc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:08Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:08 crc kubenswrapper[4787]: I0127 07:52:08.616240 4787 status_manager.go:875] "Failed to update status for pod" 
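
The repeated "Node became not ready" entries carry a structured condition payload. Below is a small sketch, assuming only the Go standard library, that unmarshals one payload copied verbatim from the setters.go:603 lines above into a hand-rolled struct (the real type is corev1.NodeCondition in the Kubernetes API) so the reason and message can be read programmatically.

// nodecond.go: illustrative sketch only; struct fields mirror what is
// visible in the logged condition, nothing more.
package main

import (
	"encoding/json"
	"fmt"
	"log"
	"time"
)

type NodeCondition struct {
	Type               string    `json:"type"`
	Status             string    `json:"status"`
	LastHeartbeatTime  time.Time `json:"lastHeartbeatTime"`
	LastTransitionTime time.Time `json:"lastTransitionTime"`
	Reason             string    `json:"reason"`
	Message            string    `json:"message"`
}

func main() {
	// Payload copied from one of the setters.go:603 entries above.
	raw := `{"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:08Z","lastTransitionTime":"2026-01-27T07:52:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}`

	var c NodeCondition
	if err := json.Unmarshal([]byte(raw), &c); err != nil {
		log.Fatalf("unmarshal: %v", err)
	}
	fmt.Printf("%s=%s (%s): %s\n", c.Type, c.Status, c.Reason, c.Message)
}
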
pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9bef4a57-904d-47b1-baa1-224c5c9f2b1a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://99c0fb5499a4bed619d003da60f83076e413fc5bda47a12d42770f567deeeec8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c114c2274358e777b240765cca81ea54a2e334002317a86595b7bfec95a11fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c470d3edd878f69c48f1457780557dd56300c0fe69342c479f1922dca856bceb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bba5d04b33d62090867842211184f53bc23cf78b90a4c67097
92639b74d1cf0d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:35Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:08Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:08 crc kubenswrapper[4787]: I0127 07:52:08.635673 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:08 crc kubenswrapper[4787]: I0127 07:52:08.635747 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:08 crc kubenswrapper[4787]: I0127 07:52:08.635766 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:08 crc kubenswrapper[4787]: I0127 07:52:08.635795 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:08 crc kubenswrapper[4787]: I0127 07:52:08.635813 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:08Z","lastTransitionTime":"2026-01-27T07:52:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:52:08 crc kubenswrapper[4787]: I0127 07:52:08.738960 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:08 crc kubenswrapper[4787]: I0127 07:52:08.739028 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:08 crc kubenswrapper[4787]: I0127 07:52:08.739038 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:08 crc kubenswrapper[4787]: I0127 07:52:08.739055 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:08 crc kubenswrapper[4787]: I0127 07:52:08.739068 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:08Z","lastTransitionTime":"2026-01-27T07:52:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:08 crc kubenswrapper[4787]: I0127 07:52:08.842268 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:08 crc kubenswrapper[4787]: I0127 07:52:08.842334 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:08 crc kubenswrapper[4787]: I0127 07:52:08.842356 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:08 crc kubenswrapper[4787]: I0127 07:52:08.842388 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:08 crc kubenswrapper[4787]: I0127 07:52:08.842408 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:08Z","lastTransitionTime":"2026-01-27T07:52:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:08 crc kubenswrapper[4787]: I0127 07:52:08.945939 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:08 crc kubenswrapper[4787]: I0127 07:52:08.946002 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:08 crc kubenswrapper[4787]: I0127 07:52:08.946016 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:08 crc kubenswrapper[4787]: I0127 07:52:08.946042 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:08 crc kubenswrapper[4787]: I0127 07:52:08.946058 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:08Z","lastTransitionTime":"2026-01-27T07:52:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:52:09 crc kubenswrapper[4787]: I0127 07:52:09.046577 4787 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-19 20:30:44.757422428 +0000 UTC Jan 27 07:52:09 crc kubenswrapper[4787]: I0127 07:52:09.049428 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:09 crc kubenswrapper[4787]: I0127 07:52:09.049478 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:09 crc kubenswrapper[4787]: I0127 07:52:09.049495 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:09 crc kubenswrapper[4787]: I0127 07:52:09.049514 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:09 crc kubenswrapper[4787]: I0127 07:52:09.049528 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:09Z","lastTransitionTime":"2026-01-27T07:52:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:09 crc kubenswrapper[4787]: I0127 07:52:09.077314 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 27 07:52:09 crc kubenswrapper[4787]: I0127 07:52:09.077377 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 27 07:52:09 crc kubenswrapper[4787]: I0127 07:52:09.077328 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 27 07:52:09 crc kubenswrapper[4787]: E0127 07:52:09.077526 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 27 07:52:09 crc kubenswrapper[4787]: E0127 07:52:09.077640 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 27 07:52:09 crc kubenswrapper[4787]: E0127 07:52:09.077709 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
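
The kubenswrapper records above each begin with a klog-style header (severity letter, MMDD date, wall-clock time, PID, source file:line) ahead of the structured message. The sketch below splits that header out; the pattern is an assumption inferred from the lines in this journal, not an official parser, and the sample line is abridged from one of the pod_workers.go:1301 entries.

// klogprefix.go: illustrative sketch only; splits a klog-formatted line
// into severity, date, time, pid, source location, and message.
package main

import (
	"fmt"
	"regexp"
)

var klogHeader = regexp.MustCompile(
	`^([IWEF])(\d{4}) (\d{2}:\d{2}:\d{2}\.\d+)\s+(\d+)\s+([^ \]]+)\] (.*)$`)

func main() {
	line := `E0127 07:52:09.077526 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: ..."`

	m := klogHeader.FindStringSubmatch(line)
	if m == nil {
		fmt.Println("not a klog-formatted line")
		return
	}
	fmt.Printf("severity=%s date(MMDD)=%s time=%s pid=%s source=%s\n",
		m[1], m[2], m[3], m[4], m[5])
	fmt.Printf("message=%s\n", m[6])
}
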
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 27 07:52:09 crc kubenswrapper[4787]: I0127 07:52:09.152458 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:09 crc kubenswrapper[4787]: I0127 07:52:09.152504 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:09 crc kubenswrapper[4787]: I0127 07:52:09.152514 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:09 crc kubenswrapper[4787]: I0127 07:52:09.152533 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:09 crc kubenswrapper[4787]: I0127 07:52:09.152544 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:09Z","lastTransitionTime":"2026-01-27T07:52:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:09 crc kubenswrapper[4787]: I0127 07:52:09.255258 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:09 crc kubenswrapper[4787]: I0127 07:52:09.255295 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:09 crc kubenswrapper[4787]: I0127 07:52:09.255305 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:09 crc kubenswrapper[4787]: I0127 07:52:09.255319 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:09 crc kubenswrapper[4787]: I0127 07:52:09.255327 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:09Z","lastTransitionTime":"2026-01-27T07:52:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:52:09 crc kubenswrapper[4787]: I0127 07:52:09.357887 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:09 crc kubenswrapper[4787]: I0127 07:52:09.357939 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:09 crc kubenswrapper[4787]: I0127 07:52:09.357949 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:09 crc kubenswrapper[4787]: I0127 07:52:09.357978 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:09 crc kubenswrapper[4787]: I0127 07:52:09.357991 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:09Z","lastTransitionTime":"2026-01-27T07:52:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:09 crc kubenswrapper[4787]: I0127 07:52:09.381684 4787 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Jan 27 07:52:09 crc kubenswrapper[4787]: I0127 07:52:09.461304 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:09 crc kubenswrapper[4787]: I0127 07:52:09.461406 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:09 crc kubenswrapper[4787]: I0127 07:52:09.461439 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:09 crc kubenswrapper[4787]: I0127 07:52:09.461459 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:09 crc kubenswrapper[4787]: I0127 07:52:09.461471 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:09Z","lastTransitionTime":"2026-01-27T07:52:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:52:09 crc kubenswrapper[4787]: I0127 07:52:09.565911 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:09 crc kubenswrapper[4787]: I0127 07:52:09.565952 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:09 crc kubenswrapper[4787]: I0127 07:52:09.565961 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:09 crc kubenswrapper[4787]: I0127 07:52:09.565978 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:09 crc kubenswrapper[4787]: I0127 07:52:09.565990 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:09Z","lastTransitionTime":"2026-01-27T07:52:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:09 crc kubenswrapper[4787]: I0127 07:52:09.668483 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:09 crc kubenswrapper[4787]: I0127 07:52:09.668527 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:09 crc kubenswrapper[4787]: I0127 07:52:09.668539 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:09 crc kubenswrapper[4787]: I0127 07:52:09.668572 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:09 crc kubenswrapper[4787]: I0127 07:52:09.668586 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:09Z","lastTransitionTime":"2026-01-27T07:52:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:09 crc kubenswrapper[4787]: I0127 07:52:09.771847 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:09 crc kubenswrapper[4787]: I0127 07:52:09.771895 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:09 crc kubenswrapper[4787]: I0127 07:52:09.771909 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:09 crc kubenswrapper[4787]: I0127 07:52:09.771927 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:09 crc kubenswrapper[4787]: I0127 07:52:09.771942 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:09Z","lastTransitionTime":"2026-01-27T07:52:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:52:09 crc kubenswrapper[4787]: I0127 07:52:09.875852 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:09 crc kubenswrapper[4787]: I0127 07:52:09.875982 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:09 crc kubenswrapper[4787]: I0127 07:52:09.876002 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:09 crc kubenswrapper[4787]: I0127 07:52:09.876067 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:09 crc kubenswrapper[4787]: I0127 07:52:09.876085 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:09Z","lastTransitionTime":"2026-01-27T07:52:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:09 crc kubenswrapper[4787]: I0127 07:52:09.979707 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:09 crc kubenswrapper[4787]: I0127 07:52:09.979762 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:09 crc kubenswrapper[4787]: I0127 07:52:09.979780 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:09 crc kubenswrapper[4787]: I0127 07:52:09.979807 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:09 crc kubenswrapper[4787]: I0127 07:52:09.979826 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:09Z","lastTransitionTime":"2026-01-27T07:52:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:52:10 crc kubenswrapper[4787]: I0127 07:52:10.047334 4787 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-16 06:24:10.22629506 +0000 UTC Jan 27 07:52:10 crc kubenswrapper[4787]: I0127 07:52:10.082650 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:10 crc kubenswrapper[4787]: I0127 07:52:10.082693 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:10 crc kubenswrapper[4787]: I0127 07:52:10.082703 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:10 crc kubenswrapper[4787]: I0127 07:52:10.082720 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:10 crc kubenswrapper[4787]: I0127 07:52:10.082732 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:10Z","lastTransitionTime":"2026-01-27T07:52:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:10 crc kubenswrapper[4787]: I0127 07:52:10.185114 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:10 crc kubenswrapper[4787]: I0127 07:52:10.185147 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:10 crc kubenswrapper[4787]: I0127 07:52:10.185155 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:10 crc kubenswrapper[4787]: I0127 07:52:10.185175 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:10 crc kubenswrapper[4787]: I0127 07:52:10.185189 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:10Z","lastTransitionTime":"2026-01-27T07:52:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:52:10 crc kubenswrapper[4787]: I0127 07:52:10.288054 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:10 crc kubenswrapper[4787]: I0127 07:52:10.288112 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:10 crc kubenswrapper[4787]: I0127 07:52:10.288126 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:10 crc kubenswrapper[4787]: I0127 07:52:10.288150 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:10 crc kubenswrapper[4787]: I0127 07:52:10.288165 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:10Z","lastTransitionTime":"2026-01-27T07:52:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:10 crc kubenswrapper[4787]: I0127 07:52:10.386210 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-7642m_fa44405c-042c-485a-ab6c-912dcd377751/ovnkube-controller/0.log" Jan 27 07:52:10 crc kubenswrapper[4787]: I0127 07:52:10.389830 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:10 crc kubenswrapper[4787]: I0127 07:52:10.389871 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:10 crc kubenswrapper[4787]: I0127 07:52:10.389886 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:10 crc kubenswrapper[4787]: I0127 07:52:10.389906 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:10 crc kubenswrapper[4787]: I0127 07:52:10.389920 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:10Z","lastTransitionTime":"2026-01-27T07:52:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:52:10 crc kubenswrapper[4787]: I0127 07:52:10.391412 4787 generic.go:334] "Generic (PLEG): container finished" podID="fa44405c-042c-485a-ab6c-912dcd377751" containerID="6fc438285725e8717b0f840dbe429df899942e4ffb4df80a73d92b32cc4635b5" exitCode=1 Jan 27 07:52:10 crc kubenswrapper[4787]: I0127 07:52:10.391472 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7642m" event={"ID":"fa44405c-042c-485a-ab6c-912dcd377751","Type":"ContainerDied","Data":"6fc438285725e8717b0f840dbe429df899942e4ffb4df80a73d92b32cc4635b5"} Jan 27 07:52:10 crc kubenswrapper[4787]: I0127 07:52:10.392195 4787 scope.go:117] "RemoveContainer" containerID="6fc438285725e8717b0f840dbe429df899942e4ffb4df80a73d92b32cc4635b5" Jan 27 07:52:10 crc kubenswrapper[4787]: I0127 07:52:10.404726 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-fghc7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6cac1bb0-6b6f-442f-9db4-a25d4f194bb1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://825d098d784c9e43aa1c5b9014dd290f1a23ddf0651dfd603c634682a8f05da2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c8zvj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:59Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-fghc7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:10Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:10 crc kubenswrapper[4787]: I0127 07:52:10.418039 4787 status_manager.go:875] "Failed to update 
status for pod" pod="openshift-image-registry/node-ca-rqwfc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fe88a860-6bb6-40ef-8ed7-c16f06fad8d3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f0dc7c2423b05f4eb857a5a6d8a5006cea055965d20a85e84659b369f9659eeb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-scmbf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:52:02Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-rqwfc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:10Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:10 crc kubenswrapper[4787]: I0127 07:52:10.435858 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9bef4a57-904d-47b1-baa1-224c5c9f2b1a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://99c0fb5499a4bed619d003da60f83076e413fc5bda47a12d42770f567deeeec8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c114c2274358e777b240765cca81ea54a2e334002317a86595b7bfec95a11fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c470d3edd878f69c48f1457780557dd56300c0fe69342c479f1922dca856bceb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bba5d04b33d62090867842211184f53bc23cf78b90a4c6709792639b74d1cf0d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:35Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:10Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:10 crc kubenswrapper[4787]: I0127 07:52:10.454154 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"60eef58e-b2eb-43d9-a499-317083a89ca3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://35e90c0899d9ae9ea7cce3f5904955f1f3a80e147efbd766e9ed3b34014f40df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://53dd18ffb0f39abccd8edc121977448f4c491f6be9acee866dde8cbeb7c52ab5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://af5452f3980a8a4614721cb60fffffb3b1f3f5d8d82928249857ed2dd3fb5986\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://879d855210b326ad3cbb964024c0b48b7373519deae8b974b9f5864416c15ef3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://879d855210b326ad3cbb964024c0b48b7373519deae8b974b9f5864416c15ef3\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"message\\\":\\\"le observer\\\\nW0127 07:51:54.984485 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0127 07:51:54.984680 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0127 07:51:54.985504 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2155430209/tls.crt::/tmp/serving-cert-2155430209/tls.key\\\\\\\"\\\\nI0127 07:51:55.207010 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0127 07:51:55.214027 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0127 07:51:55.214056 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0127 07:51:55.214085 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0127 07:51:55.214092 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0127 07:51:55.221404 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0127 07:51:55.221608 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0127 07:51:55.221697 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0127 07:51:55.221768 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0127 07:51:55.221825 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0127 07:51:55.221877 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0127 07:51:55.221926 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0127 07:51:55.221414 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0127 07:51:55.222961 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-27T07:51:49Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f3424e1f77a68fac6a8ccd12e018ce28850817c99e72cef7edbff0941124c46b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c37afd8d2562157729aca3685d7abae2bb6b9e081c2b456706dde875fd762c07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c37afd8d2562157729aca3685d7abae2bb6b9e081c2b456706dde875fd762c07\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:51:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:35Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:10Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:10 crc kubenswrapper[4787]: I0127 07:52:10.468936 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:10Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:10 crc kubenswrapper[4787]: I0127 07:52:10.486677 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:10Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:10 crc kubenswrapper[4787]: I0127 07:52:10.506797 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:10 crc kubenswrapper[4787]: I0127 07:52:10.506867 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:10 crc kubenswrapper[4787]: I0127 07:52:10.506885 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:10 crc kubenswrapper[4787]: I0127 07:52:10.506912 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:10 crc kubenswrapper[4787]: I0127 07:52:10.506933 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:10Z","lastTransitionTime":"2026-01-27T07:52:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:52:10 crc kubenswrapper[4787]: I0127 07:52:10.520044 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-rqjpz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e6f78168-0b0d-464d-b1c7-00bb9a69c0d1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e73805a73ffd2c2028fa682daaf20e295ecd2f2d7e24bb9b897883f18c3c514e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tz7b2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-rqjpz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:10Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:10 crc kubenswrapper[4787]: I0127 07:52:10.568926 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-fqb6r" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0ff88955-7cd9-4af4-9e0e-79614a1d2994\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://47c65fabdecb2c24a7eeabdff7a89339f711ff64e9cb6516900be57a2753f89b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5e7833b50fe96981aa0ed63aa03b7078def1c809e60092a901a8b7ebaee78ad8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5e7833b50fe96981aa0ed63aa03b7078def1c809e60092a901a8b7ebaee78ad8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:52:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cn
ibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a0b5a15ba402c52fc6f3cd5d882f597c90ea8d1ab4e836f0e0998a301126f5a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a0b5a15ba402c52fc6f3cd5d882f597c90ea8d1ab4e836f0e0998a301126f5a6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:52:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:52:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://82bb64425ded7611030cccfa7a439b14e545bf83bc48c762df144eddc7d08487\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://82bb64425ded7611030cccfa7a439b14e545bf83bc48c762df144eddc7d08487\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:52:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:52:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9782b2a339d2dba1498f28ed3f7e6c4c2e747a282465311592f0fdb5863f823\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":t
rue,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f9782b2a339d2dba1498f28ed3f7e6c4c2e747a282465311592f0fdb5863f823\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:52:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:52:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c8924a3ef645447151d91c506d105c9760f0dce0c568e9bf37ea76108a6352b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c8924a3ef645447151d91c506d105c9760f0dce0c568e9bf37ea76108a6352b2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:52:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f0da2ec606517cc9e0c2ed1d549c947db2eecb603f9bc9ba8bddbee0aad4710\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0f0da2ec606517cc9e0c2ed1d549c947db2eecb603f9bc9ba8bddbee0aad4710\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:52:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:52:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-fqb6r\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:10Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:10 crc kubenswrapper[4787]: I0127 07:52:10.595574 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7279787f-4f29-4eae-a213-b7ea5d579b93\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://486c1b541ff6adc6a3cdab348fbdd7fbcf1c202c1a474e5e5db37bbcfb38fe06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://47800c35810b19642ee37e41f20900bef93d843d1d7e2219547dde7bf88cc7dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://85684c09dd546fc9a972aa45da092b22d794a5588140a451becfbe962be82b76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc
2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0aa5c186d3ea5f41540e7450b9ee5a5bbfe8359762ab9aa219a4e4bb26fb0a94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9717409dfdeb0d26d9a1c28a651d1feddfd4aaa11a9bc7db7206fbf8c6400c22\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b0fb183cca1d2f6f6f5bd9c80107e85fe0f5f39ea985a036ba115836e45d7cc2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b0fb183cca1d2f6f6f5bd9c80107e85fe0f5f39ea985a036ba115836e45d7cc2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:51:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a9bb746c11b2a67e014815e16d6765a1c86a3d2c156cae83853881bdb988507c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\
\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a9bb746c11b2a67e014815e16d6765a1c86a3d2c156cae83853881bdb988507c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://1fa78137ccc23e32f6eb838abf3991ca3d99b8349721f1419c892a7f8795f55f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1fa78137ccc23e32f6eb838abf3991ca3d99b8349721f1419c892a7f8795f55f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:51:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:35Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:10Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:10 crc kubenswrapper[4787]: I0127 07:52:10.609830 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:10 crc kubenswrapper[4787]: I0127 07:52:10.609868 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:10 crc kubenswrapper[4787]: I0127 07:52:10.609877 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:10 crc kubenswrapper[4787]: I0127 07:52:10.609893 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:10 crc kubenswrapper[4787]: I0127 07:52:10.609903 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:10Z","lastTransitionTime":"2026-01-27T07:52:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:52:10 crc kubenswrapper[4787]: I0127 07:52:10.621480 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7a18f3e6327858658093c392b8b7de18ec6a1085c08ae31ae2f2dc371555011a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:10Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:10 crc kubenswrapper[4787]: I0127 07:52:10.643810 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:10Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:10 crc kubenswrapper[4787]: I0127 07:52:10.664214 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7642m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fa44405c-042c-485a-ab6c-912dcd377751\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b5f54aa358ddf7d9fe76fd45c4e3332ba2ef1fa4da77c6f38ae29209c4339a80\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2abb29a77d0081b70495701e68f0a5a9b80656eab59ae8de10fab1450a1df49f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://108cdc389c2cbb9baeb538856e70ea1ca2bce5968eb4097700b3e71e5322258d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0bcc1a1d9f6e5bd0d8cf9b36441460debd839f92291531175eb05db5070136e8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fbc588f712cb0167029fc80f924f52841fb904738bbcf774c53ca15a2642ef9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc1a27dd4120a4eafd6bfbd908f9fdbc80e9b006afafb509399f2b657b129b2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6fc438285725e8717b0f840dbe429df899942e4f
fb4df80a73d92b32cc4635b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6fc438285725e8717b0f840dbe429df899942e4ffb4df80a73d92b32cc4635b5\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-27T07:52:10Z\\\",\\\"message\\\":\\\"dler 7\\\\nI0127 07:52:09.988526 6153 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0127 07:52:09.988682 6153 reflector.go:311] Stopping reflector *v1.EgressFirewall (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressfirewall/v1/apis/informers/externalversions/factory.go:140\\\\nI0127 07:52:09.988716 6153 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0127 07:52:09.988908 6153 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0127 07:52:09.989091 6153 reflector.go:311] Stopping reflector *v1.EgressQoS (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressqos/v1/apis/informers/externalversions/factory.go:140\\\\nI0127 07:52:09.989155 6153 reflector.go:311] Stopping reflector *v1.AdminPolicyBasedExternalRoute (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/adminpolicybasedroute/v1/apis/informers/externalversions/factory.go:140\\\\nI0127 07:52:09.989230 6153 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0127 07:52:09.989309 6153 reflector.go:311] Stopping reflector *v1.EgressService (0s) from 
github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressservice/v1/apis/informers/externalversions/f\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-27T07:52:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d46b5c512f7b14d97a53ce0355a7ceb08370d3e91ccef003b40386a9785df413\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://199ad62dee1b7ba9f8af9b2f3fcd77db94e8ba7dbfe2d011610c47
358c17126b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://199ad62dee1b7ba9f8af9b2f3fcd77db94e8ba7dbfe2d011610c47358c17126b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:52:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:59Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-7642m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:10Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:10 crc kubenswrapper[4787]: I0127 07:52:10.677705 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-q4fh5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f051e184-acac-47cf-9e04-9df648288715\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cf36b99a3389bdbbca8142539870671f6be61df22503df198f6255937e619950\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/ser
viceaccount\\\",\\\"name\\\":\\\"kube-api-access-zvg7g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8829867d627dfed47b988a93a9ac3e381c0bf59794b65f8e1c1e821838477748\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zvg7g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:59Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-q4fh5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:10Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:10 crc kubenswrapper[4787]: I0127 07:52:10.692606 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2974b5846d4b9e95a3eab849f160c5b33393ff6b1bf1275bc6476ec80807d632\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4426d84c4375c7c659c77049bbd6a720bf004fa5e7780b1e8739a2620c3667f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:10Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:10 crc kubenswrapper[4787]: I0127 07:52:10.703825 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1d5a64cdfd5f8aff93294f92aebc9ccf8fa2a4063e347fce2e35604d27651d9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:10Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:10 crc kubenswrapper[4787]: I0127 07:52:10.713080 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:10 crc kubenswrapper[4787]: I0127 07:52:10.713161 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:10 crc kubenswrapper[4787]: I0127 07:52:10.713174 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:10 crc kubenswrapper[4787]: I0127 07:52:10.713214 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:10 crc kubenswrapper[4787]: I0127 07:52:10.713230 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:10Z","lastTransitionTime":"2026-01-27T07:52:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:52:10 crc kubenswrapper[4787]: I0127 07:52:10.815828 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:10 crc kubenswrapper[4787]: I0127 07:52:10.815886 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:10 crc kubenswrapper[4787]: I0127 07:52:10.815900 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:10 crc kubenswrapper[4787]: I0127 07:52:10.815919 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:10 crc kubenswrapper[4787]: I0127 07:52:10.815932 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:10Z","lastTransitionTime":"2026-01-27T07:52:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:10 crc kubenswrapper[4787]: I0127 07:52:10.850624 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 27 07:52:10 crc kubenswrapper[4787]: I0127 07:52:10.850764 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 27 07:52:10 crc kubenswrapper[4787]: I0127 07:52:10.850796 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 27 07:52:10 crc kubenswrapper[4787]: I0127 07:52:10.850825 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 27 07:52:10 crc kubenswrapper[4787]: E0127 07:52:10.850867 4787 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-27 07:52:26.850831208 +0000 UTC m=+52.503186700 (durationBeforeRetry 16s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 27 07:52:10 crc kubenswrapper[4787]: I0127 07:52:10.850938 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 27 07:52:10 crc kubenswrapper[4787]: E0127 07:52:10.850951 4787 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 27 07:52:10 crc kubenswrapper[4787]: E0127 07:52:10.850963 4787 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 27 07:52:10 crc kubenswrapper[4787]: E0127 07:52:10.850976 4787 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 27 07:52:10 crc kubenswrapper[4787]: E0127 07:52:10.850985 4787 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 27 07:52:10 crc kubenswrapper[4787]: E0127 07:52:10.850994 4787 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 27 07:52:10 crc kubenswrapper[4787]: E0127 07:52:10.850998 4787 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 27 07:52:10 crc kubenswrapper[4787]: E0127 07:52:10.851020 4787 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 27 07:52:10 crc kubenswrapper[4787]: E0127 07:52:10.851054 4787 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-01-27 07:52:26.851039453 +0000 UTC m=+52.503394945 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 27 07:52:10 crc kubenswrapper[4787]: E0127 07:52:10.851074 4787 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-01-27 07:52:26.851066753 +0000 UTC m=+52.503422245 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 27 07:52:10 crc kubenswrapper[4787]: E0127 07:52:10.851142 4787 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-27 07:52:26.851111004 +0000 UTC m=+52.503466666 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 27 07:52:10 crc kubenswrapper[4787]: E0127 07:52:10.851169 4787 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Jan 27 07:52:10 crc kubenswrapper[4787]: E0127 07:52:10.851308 4787 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-27 07:52:26.851275477 +0000 UTC m=+52.503630999 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Jan 27 07:52:10 crc kubenswrapper[4787]: I0127 07:52:10.920178 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:10 crc kubenswrapper[4787]: I0127 07:52:10.920276 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:10 crc kubenswrapper[4787]: I0127 07:52:10.920294 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:10 crc kubenswrapper[4787]: I0127 07:52:10.920336 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:10 crc kubenswrapper[4787]: I0127 07:52:10.920349 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:10Z","lastTransitionTime":"2026-01-27T07:52:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:11 crc kubenswrapper[4787]: I0127 07:52:11.023927 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:11 crc kubenswrapper[4787]: I0127 07:52:11.024041 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:11 crc kubenswrapper[4787]: I0127 07:52:11.024061 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:11 crc kubenswrapper[4787]: I0127 07:52:11.024091 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:11 crc kubenswrapper[4787]: I0127 07:52:11.024114 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:11Z","lastTransitionTime":"2026-01-27T07:52:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:11 crc kubenswrapper[4787]: I0127 07:52:11.048293 4787 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-07 17:45:39.377574685 +0000 UTC Jan 27 07:52:11 crc kubenswrapper[4787]: I0127 07:52:11.076369 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 27 07:52:11 crc kubenswrapper[4787]: I0127 07:52:11.076463 4787 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 27 07:52:11 crc kubenswrapper[4787]: E0127 07:52:11.076669 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 27 07:52:11 crc kubenswrapper[4787]: I0127 07:52:11.076699 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 27 07:52:11 crc kubenswrapper[4787]: E0127 07:52:11.076948 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 27 07:52:11 crc kubenswrapper[4787]: E0127 07:52:11.077155 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 27 07:52:11 crc kubenswrapper[4787]: I0127 07:52:11.127300 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:11 crc kubenswrapper[4787]: I0127 07:52:11.127357 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:11 crc kubenswrapper[4787]: I0127 07:52:11.127371 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:11 crc kubenswrapper[4787]: I0127 07:52:11.127399 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:11 crc kubenswrapper[4787]: I0127 07:52:11.127415 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:11Z","lastTransitionTime":"2026-01-27T07:52:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:52:11 crc kubenswrapper[4787]: I0127 07:52:11.229929 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:11 crc kubenswrapper[4787]: I0127 07:52:11.230003 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:11 crc kubenswrapper[4787]: I0127 07:52:11.230023 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:11 crc kubenswrapper[4787]: I0127 07:52:11.230051 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:11 crc kubenswrapper[4787]: I0127 07:52:11.230071 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:11Z","lastTransitionTime":"2026-01-27T07:52:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:11 crc kubenswrapper[4787]: E0127 07:52:11.256193 4787 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T07:52:11Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:11Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T07:52:11Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:11Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T07:52:11Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:11Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T07:52:11Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:11Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"55ded10c-ad4c-443d-a571-c7a8f4a50b03\\\",\\\"systemUUID\\\":\\\"553a2493-323d-43ed-bfcf-44c5a8746c19\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:11Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:11 crc kubenswrapper[4787]: I0127 07:52:11.264938 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:11 crc kubenswrapper[4787]: I0127 07:52:11.264983 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 27 07:52:11 crc kubenswrapper[4787]: I0127 07:52:11.265000 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:11 crc kubenswrapper[4787]: I0127 07:52:11.265022 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:11 crc kubenswrapper[4787]: I0127 07:52:11.265040 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:11Z","lastTransitionTime":"2026-01-27T07:52:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:11 crc kubenswrapper[4787]: E0127 07:52:11.282141 4787 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T07:52:11Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:11Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T07:52:11Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:11Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T07:52:11Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:11Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T07:52:11Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:11Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"55ded10c-ad4c-443d-a571-c7a8f4a50b03\\\",\\\"systemUUID\\\":\\\"553a2493-323d-43ed-bfcf-44c5a8746c19\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:11Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:11 crc kubenswrapper[4787]: I0127 07:52:11.286822 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:11 crc kubenswrapper[4787]: I0127 07:52:11.286917 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 27 07:52:11 crc kubenswrapper[4787]: I0127 07:52:11.286945 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:11 crc kubenswrapper[4787]: I0127 07:52:11.286982 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:11 crc kubenswrapper[4787]: I0127 07:52:11.287009 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:11Z","lastTransitionTime":"2026-01-27T07:52:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:11 crc kubenswrapper[4787]: E0127 07:52:11.304483 4787 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T07:52:11Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:11Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T07:52:11Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:11Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T07:52:11Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:11Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T07:52:11Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:11Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"55ded10c-ad4c-443d-a571-c7a8f4a50b03\\\",\\\"systemUUID\\\":\\\"553a2493-323d-43ed-bfcf-44c5a8746c19\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:11Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:11 crc kubenswrapper[4787]: I0127 07:52:11.312824 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:11 crc kubenswrapper[4787]: I0127 07:52:11.312889 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 27 07:52:11 crc kubenswrapper[4787]: I0127 07:52:11.312910 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:11 crc kubenswrapper[4787]: I0127 07:52:11.312939 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:11 crc kubenswrapper[4787]: I0127 07:52:11.312960 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:11Z","lastTransitionTime":"2026-01-27T07:52:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:11 crc kubenswrapper[4787]: E0127 07:52:11.337274 4787 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T07:52:11Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:11Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T07:52:11Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:11Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T07:52:11Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:11Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T07:52:11Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:11Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"55ded10c-ad4c-443d-a571-c7a8f4a50b03\\\",\\\"systemUUID\\\":\\\"553a2493-323d-43ed-bfcf-44c5a8746c19\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:11Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:11 crc kubenswrapper[4787]: I0127 07:52:11.341908 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:11 crc kubenswrapper[4787]: I0127 07:52:11.341970 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 27 07:52:11 crc kubenswrapper[4787]: I0127 07:52:11.341983 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:11 crc kubenswrapper[4787]: I0127 07:52:11.342009 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:11 crc kubenswrapper[4787]: I0127 07:52:11.342023 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:11Z","lastTransitionTime":"2026-01-27T07:52:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:11 crc kubenswrapper[4787]: E0127 07:52:11.357891 4787 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T07:52:11Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:11Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T07:52:11Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:11Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T07:52:11Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:11Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T07:52:11Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:11Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"55ded10c-ad4c-443d-a571-c7a8f4a50b03\\\",\\\"systemUUID\\\":\\\"553a2493-323d-43ed-bfcf-44c5a8746c19\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:11Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:11 crc kubenswrapper[4787]: E0127 07:52:11.358050 4787 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Jan 27 07:52:11 crc kubenswrapper[4787]: I0127 07:52:11.360328 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Jan 27 07:52:11 crc kubenswrapper[4787]: I0127 07:52:11.360373 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:11 crc kubenswrapper[4787]: I0127 07:52:11.360386 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:11 crc kubenswrapper[4787]: I0127 07:52:11.360408 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:11 crc kubenswrapper[4787]: I0127 07:52:11.360420 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:11Z","lastTransitionTime":"2026-01-27T07:52:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:11 crc kubenswrapper[4787]: I0127 07:52:11.397105 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-7642m_fa44405c-042c-485a-ab6c-912dcd377751/ovnkube-controller/0.log" Jan 27 07:52:11 crc kubenswrapper[4787]: I0127 07:52:11.400052 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7642m" event={"ID":"fa44405c-042c-485a-ab6c-912dcd377751","Type":"ContainerStarted","Data":"023dbda584b2664829844328d67f417bacaf8e4934d8ae66ea7b9928e95ac995"} Jan 27 07:52:11 crc kubenswrapper[4787]: I0127 07:52:11.400254 4787 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Jan 27 07:52:11 crc kubenswrapper[4787]: I0127 07:52:11.419037 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:11Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:11 crc kubenswrapper[4787]: I0127 07:52:11.432425 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:11Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:11 crc kubenswrapper[4787]: I0127 07:52:11.449366 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-rqjpz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e6f78168-0b0d-464d-b1c7-00bb9a69c0d1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e73805a73ffd2c2028fa682daaf20e295ecd2f2d7e24bb9b897883f18c3c514e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mo
untPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tz7b2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-rqjpz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:11Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:11 crc kubenswrapper[4787]: I0127 07:52:11.463348 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:11 crc kubenswrapper[4787]: I0127 07:52:11.463408 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:11 crc kubenswrapper[4787]: I0127 07:52:11.463425 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:11 crc kubenswrapper[4787]: I0127 07:52:11.463452 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:11 crc kubenswrapper[4787]: I0127 07:52:11.463476 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:11Z","lastTransitionTime":"2026-01-27T07:52:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:52:11 crc kubenswrapper[4787]: I0127 07:52:11.467451 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-fqb6r" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0ff88955-7cd9-4af4-9e0e-79614a1d2994\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://47c65fabdecb2c24a7eeabdff7a89339f711ff64e9cb6516900be57a2753f89b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5e7833b50fe96981aa0ed63aa03b7078def1c809e60092a901a8b7ebaee78ad8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5e7833b50fe96981aa0ed63aa03b7078def1c809e60092a901a8b7ebaee78ad8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:52:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a0b5a15ba402c52fc6f3cd5d882f597c90ea8d1ab4e836f0e0998a301126f5a6\
\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a0b5a15ba402c52fc6f3cd5d882f597c90ea8d1ab4e836f0e0998a301126f5a6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:52:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:52:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://82bb64425ded7611030cccfa7a439b14e545bf83bc48c762df144eddc7d08487\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://82bb64425ded7611030cccfa7a439b14e545bf83bc48c762df144eddc7d08487\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:52:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:52:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9782b2a339d2dba1498f28ed3f7e6c4c2e747a282465311592f0fdb5863f823\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f9782b2a339d2dba1498f28ed3f7e6c4c2e747a282465311592f0fdb5863f823\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:52:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:52:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"
mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c8924a3ef645447151d91c506d105c9760f0dce0c568e9bf37ea76108a6352b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c8924a3ef645447151d91c506d105c9760f0dce0c568e9bf37ea76108a6352b2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:52:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f0da2ec606517cc9e0c2ed1d549c947db2eecb603f9bc9ba8bddbee0aad4710\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0f0da2ec606517cc9e0c2ed1d549c947db2eecb603f9bc9ba8bddbee0aad4710\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:52:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:52:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-fqb6r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:11Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:11 crc kubenswrapper[4787]: I0127 07:52:11.494175 4787 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7279787f-4f29-4eae-a213-b7ea5d579b93\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://486c1b541ff6adc6a3cdab348fbdd7fbcf1c202c1a474e5e5db37bbcfb38fe06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://47800c35810b19642ee37e41f20900bef93d843d1d7e2219547dde7bf88cc7dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://85684c09dd546fc9a972aa45da092b22d794a5588140a451becfbe962be82b76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"container
ID\\\":\\\"cri-o://0aa5c186d3ea5f41540e7450b9ee5a5bbfe8359762ab9aa219a4e4bb26fb0a94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9717409dfdeb0d26d9a1c28a651d1feddfd4aaa11a9bc7db7206fbf8c6400c22\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b0fb183cca1d2f6f6f5bd9c80107e85fe0f5f39ea985a036ba115836e45d7cc2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b0fb183cca1d2f6f6f5bd9c80107e85fe0f5f39ea985a036ba115836e45d7cc2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:51:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a9bb746c11b2a67e014815e16d6765a1c86a3d2c156cae83853881bdb988507c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a9bb746c11b2a67e014815e16d6765a1c86a3d2c156cae83853881bdb988507c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://1fa78137cc
c23e32f6eb838abf3991ca3d99b8349721f1419c892a7f8795f55f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1fa78137ccc23e32f6eb838abf3991ca3d99b8349721f1419c892a7f8795f55f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:51:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:35Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:11Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:11 crc kubenswrapper[4787]: I0127 07:52:11.514728 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7a18f3e6327858658093c392b8b7de18ec6a1085c08ae31ae2f2dc371555011a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod 
\"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:11Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:11 crc kubenswrapper[4787]: I0127 07:52:11.536984 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7642m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fa44405c-042c-485a-ab6c-912dcd377751\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b5f54aa358ddf7d9fe76fd45c4e3332ba2ef1fa4da77c6f38ae29209c4339a80\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2abb29a77d0081b70495701e68f0a5a9b80656eab59ae8de10fab1450a1df49f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\
\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://108cdc389c2cbb9baeb538856e70ea1ca2bce5968eb4097700b3e71e5322258d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0bcc1a1d9f6e5bd0d8cf9b36441460debd839f92291531175eb05db5070136e8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fbc588f712cb0167029fc80f924f52841fb904738bbcf774c53ca15a2642ef9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-acces
s-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc1a27dd4120a4eafd6bfbd908f9fdbc80e9b006afafb509399f2b657b129b2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://023dbda584b2664829844328d67f417bacaf8e4934d8ae66ea7b9928e95ac995\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6fc438285725e8717b0f840dbe429df899942e4ffb4df80a73d92b32cc4635b5\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-27T07:52:10Z\\\",\\\"message\\\":\\\"dler 7\\\\nI0127 07:52:09.988526 6153 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0127 07:52:09.988682 6153 reflector.go:311] Stopping reflector *v1.EgressFirewall (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressfirewall/v1/apis/informers/externalversions/factory.go:140\\\\nI0127 07:52:09.988716 6153 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0127 07:52:09.988908 6153 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0127 07:52:09.989091 6153 reflector.go:311] Stopping reflector *v1.EgressQoS (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressqos/v1/apis/informers/externalversions/factory.go:140\\\\nI0127 07:52:09.989155 6153 reflector.go:311] Stopping reflector *v1.AdminPolicyBasedExternalRoute (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/adminpolicybasedroute/v1/apis/informers/externalversions/factory.go:140\\\\nI0127 07:52:09.989230 6153 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0127 07:52:09.989309 6153 reflector.go:311] Stopping reflector *v1.EgressService (0s) from 
github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressservice/v1/apis/informers/externalversions/f\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-27T07:52:06Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d46b5c512f7b14d97a53ce0355a7ceb08370d3e91ccef003b40386a9785df413\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.16
8.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://199ad62dee1b7ba9f8af9b2f3fcd77db94e8ba7dbfe2d011610c47358c17126b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://199ad62dee1b7ba9f8af9b2f3fcd77db94e8ba7dbfe2d011610c47358c17126b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:52:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:59Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-7642m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:11Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:11 crc kubenswrapper[4787]: I0127 07:52:11.549935 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1d5a64cdfd5f8aff93294f92aebc9ccf8fa2a4063e347fce2e35604d27651d9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:11Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:11 crc kubenswrapper[4787]: I0127 07:52:11.562422 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-q4fh5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f051e184-acac-47cf-9e04-9df648288715\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cf36b99a3389bdbbca8142539870671f6be61df22503df198f6255937e619950\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zvg7g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8829867d627dfed47b988a93a9ac3e381c0bf59794b65f8e1c1e821838477748\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zvg7g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:59Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-q4fh5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:11Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:11 crc kubenswrapper[4787]: I0127 07:52:11.566589 4787 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:11 crc kubenswrapper[4787]: I0127 07:52:11.566624 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:11 crc kubenswrapper[4787]: I0127 07:52:11.566636 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:11 crc kubenswrapper[4787]: I0127 07:52:11.566653 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:11 crc kubenswrapper[4787]: I0127 07:52:11.566666 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:11Z","lastTransitionTime":"2026-01-27T07:52:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:11 crc kubenswrapper[4787]: I0127 07:52:11.580731 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2974b5846d4b9e95a3eab849f160c5b33393ff6b1bf1275bc6476ec80807d632\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4426d84c4375c7c659c77049bbd6a720bf004fa5e7780b1e8739a2620c3667f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mo
untPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:11Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:11 crc kubenswrapper[4787]: I0127 07:52:11.598593 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:11Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:11 crc kubenswrapper[4787]: I0127 07:52:11.615622 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-fghc7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6cac1bb0-6b6f-442f-9db4-a25d4f194bb1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://825d098d784c9e43aa1c5b9014dd290f1a23ddf0651dfd603c634682a8f05da2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c8zvj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:59Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-fghc7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:11Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:11 crc kubenswrapper[4787]: I0127 07:52:11.631116 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-rqwfc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fe88a860-6bb6-40ef-8ed7-c16f06fad8d3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f0dc7c2423b05f4eb857a5a6d8a5006cea055965d20a85e84659b369f9659eeb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-scmbf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:52:02Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-rqwfc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:11Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:11 crc kubenswrapper[4787]: I0127 07:52:11.653429 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9bef4a57-904d-47b1-baa1-224c5c9f2b1a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://99c0fb5499a4bed619d003da60f83076e413fc5bda47a12d42770f567deeeec8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c114c2274358e777b240765cca81ea54a2e334002317a86595b7bfec95a11fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c470d3edd878f69c48f1457780557dd56300c0fe69342c479f1922dca856bceb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bba5d04b33d62090867842211184f53bc23cf78b90a4c6709792639b74d1cf0d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:35Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:11Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:11 crc kubenswrapper[4787]: I0127 07:52:11.668874 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:11 crc kubenswrapper[4787]: I0127 07:52:11.668922 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:11 crc kubenswrapper[4787]: I0127 07:52:11.668934 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:11 crc kubenswrapper[4787]: I0127 07:52:11.668948 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:11 crc kubenswrapper[4787]: I0127 07:52:11.668959 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:11Z","lastTransitionTime":"2026-01-27T07:52:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:52:11 crc kubenswrapper[4787]: I0127 07:52:11.673371 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"60eef58e-b2eb-43d9-a499-317083a89ca3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://35e90c0899d9ae9ea7cce3f5904955f1f3a80e147efbd766e9ed3b34014f40df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://53dd18ffb0f39abccd8edc121977448f4c491f6be9acee866dde8cbeb7c52ab5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://af5452f3980a8a4614721cb60fffffb3b1f3f5d8d82928249857ed2dd3fb5986\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartC
ount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://879d855210b326ad3cbb964024c0b48b7373519deae8b974b9f5864416c15ef3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://879d855210b326ad3cbb964024c0b48b7373519deae8b974b9f5864416c15ef3\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"message\\\":\\\"le observer\\\\nW0127 07:51:54.984485 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0127 07:51:54.984680 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0127 07:51:54.985504 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2155430209/tls.crt::/tmp/serving-cert-2155430209/tls.key\\\\\\\"\\\\nI0127 07:51:55.207010 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0127 07:51:55.214027 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0127 07:51:55.214056 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0127 07:51:55.214085 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0127 07:51:55.214092 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0127 07:51:55.221404 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0127 07:51:55.221608 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0127 07:51:55.221697 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0127 07:51:55.221768 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0127 07:51:55.221825 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0127 07:51:55.221877 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0127 07:51:55.221926 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0127 07:51:55.221414 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0127 07:51:55.222961 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-27T07:51:49Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f3424e1f77a68fac6a8ccd12e018ce28850817c99e72cef7edbff0941124c46b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c37afd8d2562157729aca3685d7abae2bb6b9e081c2b456706dde875fd762c07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c37afd8d2562157729aca3685d7abae2bb6b9e081c2b456706dde875fd762c07\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:51:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:35Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:11Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:11 crc kubenswrapper[4787]: I0127 07:52:11.772283 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:11 crc kubenswrapper[4787]: I0127 07:52:11.772336 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:11 crc kubenswrapper[4787]: I0127 07:52:11.772348 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:11 crc kubenswrapper[4787]: I0127 07:52:11.772369 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:11 crc kubenswrapper[4787]: I0127 07:52:11.772383 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:11Z","lastTransitionTime":"2026-01-27T07:52:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:11 crc kubenswrapper[4787]: I0127 07:52:11.876326 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:11 crc kubenswrapper[4787]: I0127 07:52:11.876397 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:11 crc kubenswrapper[4787]: I0127 07:52:11.876415 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:11 crc kubenswrapper[4787]: I0127 07:52:11.876444 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:11 crc kubenswrapper[4787]: I0127 07:52:11.876467 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:11Z","lastTransitionTime":"2026-01-27T07:52:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:11 crc kubenswrapper[4787]: I0127 07:52:11.979371 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:11 crc kubenswrapper[4787]: I0127 07:52:11.979947 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:11 crc kubenswrapper[4787]: I0127 07:52:11.980054 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:11 crc kubenswrapper[4787]: I0127 07:52:11.980151 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:11 crc kubenswrapper[4787]: I0127 07:52:11.980234 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:11Z","lastTransitionTime":"2026-01-27T07:52:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:52:12 crc kubenswrapper[4787]: I0127 07:52:12.049539 4787 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-13 06:37:12.216947288 +0000 UTC Jan 27 07:52:12 crc kubenswrapper[4787]: I0127 07:52:12.077026 4787 scope.go:117] "RemoveContainer" containerID="879d855210b326ad3cbb964024c0b48b7373519deae8b974b9f5864416c15ef3" Jan 27 07:52:12 crc kubenswrapper[4787]: I0127 07:52:12.083512 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:12 crc kubenswrapper[4787]: I0127 07:52:12.083597 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:12 crc kubenswrapper[4787]: I0127 07:52:12.083617 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:12 crc kubenswrapper[4787]: I0127 07:52:12.083640 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:12 crc kubenswrapper[4787]: I0127 07:52:12.083655 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:12Z","lastTransitionTime":"2026-01-27T07:52:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:12 crc kubenswrapper[4787]: I0127 07:52:12.112960 4787 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-vhss5"] Jan 27 07:52:12 crc kubenswrapper[4787]: I0127 07:52:12.113820 4787 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-vhss5" Jan 27 07:52:12 crc kubenswrapper[4787]: I0127 07:52:12.116328 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-control-plane-dockercfg-gs7dd" Jan 27 07:52:12 crc kubenswrapper[4787]: I0127 07:52:12.116798 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-control-plane-metrics-cert" Jan 27 07:52:12 crc kubenswrapper[4787]: I0127 07:52:12.130033 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2974b5846d4b9e95a3eab849f160c5b33393ff6b1bf1275bc6476ec80807d632\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4426d84c4375c7c659c77049bbd6a720bf004fa5e7780b1e8739a2620c3667f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:12Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:12 crc kubenswrapper[4787]: I0127 07:52:12.146887 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1d5a64cdfd5f8aff93294f92aebc9ccf8fa2a4063e347fce2e35604d27651d9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:12Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:12 crc kubenswrapper[4787]: I0127 07:52:12.166193 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-q4fh5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f051e184-acac-47cf-9e04-9df648288715\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cf36b99a3389bdbbca8142539870671f6be61df22503df198f6255937e619950\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zvg7g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8829867d627dfed47b988a93a9ac3e381c0bf59794b65f8e1c1e821838477748\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zvg7g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:59Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-q4fh5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:12Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:12 crc kubenswrapper[4787]: I0127 07:52:12.183620 4787 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9bef4a57-904d-47b1-baa1-224c5c9f2b1a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://99c0fb5499a4bed619d003da60f83076e413fc5bda47a12d42770f567deeeec8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c114c2274358e777b240765cca81ea54a2e334002317a86595b7bfec95a11fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c470d3edd878f69c48f1457780557dd56300c0fe69342c479f1922dca856bceb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bba5d04b33d6209086784221118
4f53bc23cf78b90a4c6709792639b74d1cf0d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:35Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:12Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:12 crc kubenswrapper[4787]: I0127 07:52:12.186469 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:12 crc kubenswrapper[4787]: I0127 07:52:12.186565 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:12 crc kubenswrapper[4787]: I0127 07:52:12.186577 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:12 crc kubenswrapper[4787]: I0127 07:52:12.186592 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:12 crc kubenswrapper[4787]: I0127 07:52:12.186603 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:12Z","lastTransitionTime":"2026-01-27T07:52:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:52:12 crc kubenswrapper[4787]: I0127 07:52:12.200433 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"60eef58e-b2eb-43d9-a499-317083a89ca3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://35e90c0899d9ae9ea7cce3f5904955f1f3a80e147efbd766e9ed3b34014f40df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://53dd18ffb0f39abccd8edc121977448f4c491f6be9acee866dde8cbeb7c52ab5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://af5452f3980a8a4614721cb60fffffb3b1f3f5d8d82928249857ed2dd3fb5986\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartC
ount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://879d855210b326ad3cbb964024c0b48b7373519deae8b974b9f5864416c15ef3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://879d855210b326ad3cbb964024c0b48b7373519deae8b974b9f5864416c15ef3\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"message\\\":\\\"le observer\\\\nW0127 07:51:54.984485 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0127 07:51:54.984680 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0127 07:51:54.985504 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2155430209/tls.crt::/tmp/serving-cert-2155430209/tls.key\\\\\\\"\\\\nI0127 07:51:55.207010 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0127 07:51:55.214027 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0127 07:51:55.214056 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0127 07:51:55.214085 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0127 07:51:55.214092 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0127 07:51:55.221404 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0127 07:51:55.221608 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0127 07:51:55.221697 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0127 07:51:55.221768 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0127 07:51:55.221825 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0127 07:51:55.221877 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0127 07:51:55.221926 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0127 07:51:55.221414 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0127 07:51:55.222961 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-27T07:51:49Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f3424e1f77a68fac6a8ccd12e018ce28850817c99e72cef7edbff0941124c46b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c37afd8d2562157729aca3685d7abae2bb6b9e081c2b456706dde875fd762c07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c37afd8d2562157729aca3685d7abae2bb6b9e081c2b456706dde875fd762c07\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:51:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:35Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:12Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:12 crc kubenswrapper[4787]: I0127 07:52:12.214920 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:12Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:12 crc kubenswrapper[4787]: I0127 07:52:12.229108 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-fghc7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6cac1bb0-6b6f-442f-9db4-a25d4f194bb1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://825d098d784c9e43aa1c5b9014dd290f1a23ddf0651dfd603c634682a8f05da2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c8zvj\\\",\\\"readOnly\\\":true,\\\"recu
rsiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:59Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-fghc7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:12Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:12 crc kubenswrapper[4787]: I0127 07:52:12.243228 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-rqwfc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fe88a860-6bb6-40ef-8ed7-c16f06fad8d3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f0dc7c2423b05f4eb857a5a6d8a5006cea055965d20a85e84659b369f9659eeb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-scmbf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:52:02Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-rqwfc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:12Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:12 crc kubenswrapper[4787]: I0127 07:52:12.268511 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started 
for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/643aaef9-e302-436b-943e-940480ef74fc-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-vhss5\" (UID: \"643aaef9-e302-436b-943e-940480ef74fc\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-vhss5" Jan 27 07:52:12 crc kubenswrapper[4787]: I0127 07:52:12.268678 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/643aaef9-e302-436b-943e-940480ef74fc-env-overrides\") pod \"ovnkube-control-plane-749d76644c-vhss5\" (UID: \"643aaef9-e302-436b-943e-940480ef74fc\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-vhss5" Jan 27 07:52:12 crc kubenswrapper[4787]: I0127 07:52:12.268718 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8q989\" (UniqueName: \"kubernetes.io/projected/643aaef9-e302-436b-943e-940480ef74fc-kube-api-access-8q989\") pod \"ovnkube-control-plane-749d76644c-vhss5\" (UID: \"643aaef9-e302-436b-943e-940480ef74fc\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-vhss5" Jan 27 07:52:12 crc kubenswrapper[4787]: I0127 07:52:12.268773 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/643aaef9-e302-436b-943e-940480ef74fc-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-vhss5\" (UID: \"643aaef9-e302-436b-943e-940480ef74fc\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-vhss5" Jan 27 07:52:12 crc kubenswrapper[4787]: I0127 07:52:12.281767 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7279787f-4f29-4eae-a213-b7ea5d579b93\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://486c1b541ff6adc6a3cdab348fbdd7fbcf1c202c1a474e5e5db37bbcfb38fe06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://47800c35810b19642ee37e41f20900bef93d843d1d7e2219547dde7bf88cc7dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://85684c09dd546fc9a972aa45da092b22d794a5588140a451becfbe962be82b76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0aa5c186d3ea5f41540e7450b9ee5a5bbfe8359
762ab9aa219a4e4bb26fb0a94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9717409dfdeb0d26d9a1c28a651d1feddfd4aaa11a9bc7db7206fbf8c6400c22\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b0fb183cca1d2f6f6f5bd9c80107e85fe0f5f39ea985a036ba115836e45d7cc2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b0fb183cca1d2f6f6f5bd9c80107e85fe0f5f39ea985a036ba115836e45d7cc2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:51:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a9bb746c11b2a67e014815e16d6765a1c86a3d2c156cae83853881bdb988507c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a9bb746c11b2a67e014815e16d6765a1c86a3d2c156cae83853881bdb988507c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://1fa78137ccc23e32f6eb838abf3991ca3d99b8349721f1419c892a7f8795f55f\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1fa78137ccc23e32f6eb838abf3991ca3d99b8349721f1419c892a7f8795f55f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:51:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:35Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:12Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:12 crc kubenswrapper[4787]: I0127 07:52:12.289225 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:12 crc kubenswrapper[4787]: I0127 07:52:12.289285 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:12 crc kubenswrapper[4787]: I0127 07:52:12.289302 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:12 crc kubenswrapper[4787]: I0127 07:52:12.289648 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:12 crc kubenswrapper[4787]: I0127 07:52:12.289667 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:12Z","lastTransitionTime":"2026-01-27T07:52:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:52:12 crc kubenswrapper[4787]: I0127 07:52:12.305184 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7a18f3e6327858658093c392b8b7de18ec6a1085c08ae31ae2f2dc371555011a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:12Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:12 crc kubenswrapper[4787]: I0127 07:52:12.325291 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:12Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:12 crc kubenswrapper[4787]: I0127 07:52:12.344227 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:12Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:12 crc kubenswrapper[4787]: I0127 07:52:12.358504 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-rqjpz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e6f78168-0b0d-464d-b1c7-00bb9a69c0d1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e73805a73ffd2c2028fa682daaf20e295ecd2f2d7e24bb9b897883f18c3c514e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mo
untPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tz7b2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-rqjpz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:12Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:12 crc kubenswrapper[4787]: I0127 07:52:12.369336 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/643aaef9-e302-436b-943e-940480ef74fc-env-overrides\") pod \"ovnkube-control-plane-749d76644c-vhss5\" (UID: \"643aaef9-e302-436b-943e-940480ef74fc\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-vhss5" Jan 27 07:52:12 crc kubenswrapper[4787]: I0127 07:52:12.369384 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8q989\" (UniqueName: \"kubernetes.io/projected/643aaef9-e302-436b-943e-940480ef74fc-kube-api-access-8q989\") pod \"ovnkube-control-plane-749d76644c-vhss5\" (UID: \"643aaef9-e302-436b-943e-940480ef74fc\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-vhss5" Jan 27 07:52:12 crc kubenswrapper[4787]: I0127 07:52:12.369482 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/643aaef9-e302-436b-943e-940480ef74fc-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-vhss5\" (UID: \"643aaef9-e302-436b-943e-940480ef74fc\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-vhss5" Jan 27 07:52:12 crc kubenswrapper[4787]: I0127 07:52:12.369523 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/643aaef9-e302-436b-943e-940480ef74fc-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-vhss5\" (UID: \"643aaef9-e302-436b-943e-940480ef74fc\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-vhss5" Jan 27 07:52:12 crc kubenswrapper[4787]: I0127 07:52:12.370089 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/643aaef9-e302-436b-943e-940480ef74fc-env-overrides\") pod \"ovnkube-control-plane-749d76644c-vhss5\" (UID: \"643aaef9-e302-436b-943e-940480ef74fc\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-vhss5" Jan 27 07:52:12 crc 
kubenswrapper[4787]: I0127 07:52:12.370497 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/643aaef9-e302-436b-943e-940480ef74fc-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-vhss5\" (UID: \"643aaef9-e302-436b-943e-940480ef74fc\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-vhss5" Jan 27 07:52:12 crc kubenswrapper[4787]: I0127 07:52:12.376363 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/643aaef9-e302-436b-943e-940480ef74fc-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-vhss5\" (UID: \"643aaef9-e302-436b-943e-940480ef74fc\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-vhss5" Jan 27 07:52:12 crc kubenswrapper[4787]: I0127 07:52:12.377911 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-fqb6r" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0ff88955-7cd9-4af4-9e0e-79614a1d2994\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://47c65fabdecb2c24a7eeabdff7a89339f711ff64e9cb6516900be57a2753f89b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5e7833b50fe96981aa0ed63aa03b7078def1c809e60092a901a8b7ebaee78ad8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5e7833b50fe9698
1aa0ed63aa03b7078def1c809e60092a901a8b7ebaee78ad8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:52:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a0b5a15ba402c52fc6f3cd5d882f597c90ea8d1ab4e836f0e0998a301126f5a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a0b5a15ba402c52fc6f3cd5d882f597c90ea8d1ab4e836f0e0998a301126f5a6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:52:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:52:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://82bb64425ded7611030cccfa7a439b14e545bf83bc48c762df144eddc7d08487\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://82bb64425ded7611030cccfa7a439b14e545bf83bc48c762df144eddc7d08487\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:52:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:52:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9782b2a339d2dba1498f28ed3f7e6c4c2e747a282465311592f0fdb5863f8
23\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f9782b2a339d2dba1498f28ed3f7e6c4c2e747a282465311592f0fdb5863f823\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:52:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:52:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c8924a3ef645447151d91c506d105c9760f0dce0c568e9bf37ea76108a6352b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c8924a3ef645447151d91c506d105c9760f0dce0c568e9bf37ea76108a6352b2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:52:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f0da2ec606517cc9e0c2ed1d549c947db2eecb603f9bc9ba8bddbee0aad4710\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0f0da2ec606517cc9e0c2ed1d549c947db2eecb603f9bc9ba8bddbee0aad4710\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:52:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:52:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\
\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-fqb6r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:12Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:12 crc kubenswrapper[4787]: I0127 07:52:12.386990 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8q989\" (UniqueName: \"kubernetes.io/projected/643aaef9-e302-436b-943e-940480ef74fc-kube-api-access-8q989\") pod \"ovnkube-control-plane-749d76644c-vhss5\" (UID: \"643aaef9-e302-436b-943e-940480ef74fc\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-vhss5" Jan 27 07:52:12 crc kubenswrapper[4787]: I0127 07:52:12.393398 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:12 crc kubenswrapper[4787]: I0127 07:52:12.393436 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:12 crc kubenswrapper[4787]: I0127 07:52:12.393453 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:12 crc kubenswrapper[4787]: I0127 07:52:12.393472 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:12 crc kubenswrapper[4787]: I0127 07:52:12.393485 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:12Z","lastTransitionTime":"2026-01-27T07:52:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:52:12 crc kubenswrapper[4787]: I0127 07:52:12.401478 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7642m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fa44405c-042c-485a-ab6c-912dcd377751\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b5f54aa358ddf7d9fe76fd45c4e3332ba2ef1fa4da77c6f38ae29209c4339a80\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2abb29a77d0081b70495701e68f0a5a9b80656eab59ae8de10fab1450a1df49f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://108cdc389c2cbb9baeb538856e70ea1ca2bce5968eb4097700b3e71e5322258d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0bcc1a1d9f6e5bd0d8cf9b36441460debd839f92291531175eb05db5070136e8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fbc588f712cb0167029fc80f924f52841fb904738bbcf774c53ca15a2642ef9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc1a27dd4120a4eafd6bfbd908f9fdbc80e9b006afafb509399f2b657b129b2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://023dbda584b2664829844328d67f417bacaf8e4934d8ae66ea7b9928e95ac995\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6fc438285725e8717b0f840dbe429df899942e4ffb4df80a73d92b32cc4635b5\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-27T07:52:10Z\\\",\\\"message\\\":\\\"dler 7\\\\nI0127 07:52:09.988526 6153 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0127 07:52:09.988682 6153 reflector.go:311] Stopping reflector *v1.EgressFirewall (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressfirewall/v1/apis/informers/externalversions/factory.go:140\\\\nI0127 07:52:09.988716 6153 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0127 07:52:09.988908 6153 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0127 07:52:09.989091 6153 reflector.go:311] Stopping reflector *v1.EgressQoS (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressqos/v1/apis/informers/externalversions/factory.go:140\\\\nI0127 07:52:09.989155 6153 reflector.go:311] Stopping reflector *v1.AdminPolicyBasedExternalRoute (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/adminpolicybasedroute/v1/apis/informers/externalversions/factory.go:140\\\\nI0127 07:52:09.989230 6153 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0127 07:52:09.989309 6153 reflector.go:311] Stopping reflector *v1.EgressService (0s) from 
github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressservice/v1/apis/informers/externalversions/f\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-27T07:52:06Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d46b5c512f7b14d97a53ce0355a7ceb08370d3e91ccef003b40386a9785df413\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.16
8.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://199ad62dee1b7ba9f8af9b2f3fcd77db94e8ba7dbfe2d011610c47358c17126b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://199ad62dee1b7ba9f8af9b2f3fcd77db94e8ba7dbfe2d011610c47358c17126b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:52:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:59Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-7642m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:12Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:12 crc kubenswrapper[4787]: I0127 07:52:12.404821 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-7642m_fa44405c-042c-485a-ab6c-912dcd377751/ovnkube-controller/1.log" Jan 27 07:52:12 crc kubenswrapper[4787]: I0127 07:52:12.405294 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-7642m_fa44405c-042c-485a-ab6c-912dcd377751/ovnkube-controller/0.log" Jan 27 07:52:12 crc kubenswrapper[4787]: I0127 07:52:12.407748 4787 generic.go:334] "Generic (PLEG): container finished" podID="fa44405c-042c-485a-ab6c-912dcd377751" containerID="023dbda584b2664829844328d67f417bacaf8e4934d8ae66ea7b9928e95ac995" exitCode=1 Jan 27 07:52:12 crc kubenswrapper[4787]: I0127 07:52:12.407795 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7642m" event={"ID":"fa44405c-042c-485a-ab6c-912dcd377751","Type":"ContainerDied","Data":"023dbda584b2664829844328d67f417bacaf8e4934d8ae66ea7b9928e95ac995"} Jan 27 07:52:12 crc kubenswrapper[4787]: I0127 07:52:12.407842 4787 scope.go:117] "RemoveContainer" containerID="6fc438285725e8717b0f840dbe429df899942e4ffb4df80a73d92b32cc4635b5" Jan 27 07:52:12 crc kubenswrapper[4787]: I0127 07:52:12.408586 4787 scope.go:117] "RemoveContainer" containerID="023dbda584b2664829844328d67f417bacaf8e4934d8ae66ea7b9928e95ac995" Jan 27 07:52:12 crc kubenswrapper[4787]: E0127 07:52:12.408795 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-7642m_openshift-ovn-kubernetes(fa44405c-042c-485a-ab6c-912dcd377751)\"" pod="openshift-ovn-kubernetes/ovnkube-node-7642m" 
podUID="fa44405c-042c-485a-ab6c-912dcd377751" Jan 27 07:52:12 crc kubenswrapper[4787]: I0127 07:52:12.414143 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-vhss5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"643aaef9-e302-436b-943e-940480ef74fc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:12Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:12Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8q989\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8q989\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:52:12Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-vhss5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet 
valid: current time 2026-01-27T07:52:12Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:12 crc kubenswrapper[4787]: I0127 07:52:12.433999 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7642m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fa44405c-042c-485a-ab6c-912dcd377751\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b5f54aa358ddf7d9fe76fd45c4e3332ba2ef1fa4da77c6f38ae29209c4339a80\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2abb29a77d0081b70495701e68f0a5a9b80656eab59ae8de10fab1450a1df49f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"
Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://108cdc389c2cbb9baeb538856e70ea1ca2bce5968eb4097700b3e71e5322258d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0bcc1a1d9f6e5bd0d8cf9b36441460debd839f92291531175eb05db5070136e8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fbc588f712cb0167029fc80f924f52841fb904738bbcf774c53ca15a2642ef9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc1a27dd4120a4eafd6bfbd908f9fdbc80e9b006afafb509399f2b657b129b2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa4
1ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://023dbda584b2664829844328d67f417bacaf8e4934d8ae66ea7b9928e95ac995\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6fc438285725e8717b0f840dbe429df899942e4ffb4df80a73d92b32cc4635b5\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-27T07:52:10Z\\\",\\\"message\\\":\\\"dler 7\\\\nI0127 07:52:09.988526 6153 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0127 07:52:09.988682 6153 reflector.go:311] Stopping reflector *v1.EgressFirewall (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressfirewall/v1/apis/informers/externalversions/factory.go:140\\\\nI0127 07:52:09.988716 6153 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0127 07:52:09.988908 6153 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0127 07:52:09.989091 6153 reflector.go:311] Stopping reflector *v1.EgressQoS (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressqos/v1/apis/informers/externalversions/factory.go:140\\\\nI0127 07:52:09.989155 6153 reflector.go:311] Stopping reflector *v1.AdminPolicyBasedExternalRoute (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/adminpolicybasedroute/v1/apis/informers/externalversions/factory.go:140\\\\nI0127 07:52:09.989230 6153 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0127 07:52:09.989309 6153 reflector.go:311] Stopping reflector *v1.EgressService (0s) from 
github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressservice/v1/apis/informers/externalversions/f\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-27T07:52:06Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://023dbda584b2664829844328d67f417bacaf8e4934d8ae66ea7b9928e95ac995\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-27T07:52:12Z\\\",\\\"message\\\":\\\"80c50373a5e91f08c5893365bfd5a5040449b1b6585a23 prometheus.io/scheme:https prometheus.io/scrape:true service.alpha.openshift.io/serving-cert-secret-name:serving-cert service.alpha.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168 service.beta.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168] [] [] []},Spec:ServiceSpec{Ports:[]ServicePort{ServicePort{Name:https,Protocol:TCP,Port:443,TargetPort:{0 8443 },NodePort:0,AppProtocol:nil,},},Selector:map[string]string{apiserver: true,},ClusterIP:10.217.4.140,Type:ClusterIP,ExternalIPs:[],SessionAffinity:None,LoadBalancerIP:,LoadBalancerSourceRanges:[],ExternalName:,ExternalTrafficPolicy:,HealthCheckNodePort:0,PublishNotReadyAddresses:false,SessionAffinityConfig:nil,IPFamilyPolicy:*SingleStack,ClusterIPs:[10.217.4.140],IPFamilies:[IPv4],AllocateLoadBalancerNodePorts:nil,LoadBalancerClass:nil,InternalTrafficPolicy:*Cluster,TrafficDistribution:nil,},Status:ServiceStatus{LoadBalancer:LoadBalancerStatus{Ingress:[]LoadBalancerIngress{},},Conditions:[]Condition{},},}\\\\nI0127 07:52:11.779634 6307 services_controller.go:356] Processing sync for service openshift-service-ca-operator/metrics for network=default\\\\nF0127 07:52:11.779647 6307 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network 
contro\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-27T07:52:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d46b5c512f7b14d97a53ce0355a7ceb08370d3e91ccef003b40386a9785df413\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://199ad62dee1b7ba9f8af9b2f3fcd77db94e8ba7dbfe2d011610c47358c17126b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d
1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://199ad62dee1b7ba9f8af9b2f3fcd77db94e8ba7dbfe2d011610c47358c17126b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:52:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:59Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-7642m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:12Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:12 crc kubenswrapper[4787]: I0127 07:52:12.434444 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-vhss5" Jan 27 07:52:12 crc kubenswrapper[4787]: I0127 07:52:12.446899 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-vhss5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"643aaef9-e302-436b-943e-940480ef74fc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:12Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:12Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy 
ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8q989\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8q989\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:52:12Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-vhss5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:12Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:12 crc kubenswrapper[4787]: I0127 07:52:12.459709 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2974b5846d4b9e95a3eab849f160c5b33393ff6b1bf1275bc6476ec80807d632\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4426d84c4375c7c659c77049bbd6a720bf004fa5e7780b1e8739a2620c3667f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:12Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:12 crc kubenswrapper[4787]: I0127 07:52:12.470066 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1d5a64cdfd5f8aff93294f92aebc9ccf8fa2a4063e347fce2e35604d27651d9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:12Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:12 crc kubenswrapper[4787]: I0127 07:52:12.484626 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-q4fh5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f051e184-acac-47cf-9e04-9df648288715\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cf36b99a3389bdbbca8142539870671f6be61df22503df198f6255937e619950\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zvg7g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8829867d627dfed47b988a93a9ac3e381c0bf59794b65f8e1c1e821838477748\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zvg7g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:59Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-q4fh5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:12Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:12 crc kubenswrapper[4787]: I0127 07:52:12.496841 4787 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:12 crc kubenswrapper[4787]: I0127 07:52:12.496886 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:12 crc kubenswrapper[4787]: I0127 07:52:12.496899 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:12 crc kubenswrapper[4787]: I0127 07:52:12.496922 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:12 crc kubenswrapper[4787]: I0127 07:52:12.496937 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:12Z","lastTransitionTime":"2026-01-27T07:52:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:12 crc kubenswrapper[4787]: I0127 07:52:12.497616 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9bef4a57-904d-47b1-baa1-224c5c9f2b1a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://99c0fb5499a4bed619d003da60f83076e413fc5bda47a12d42770f567deeeec8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c114c2274358e777b240765cca81ea54a2e334002317a86595b7bfec95a11fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\
\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c470d3edd878f69c48f1457780557dd56300c0fe69342c479f1922dca856bceb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bba5d04b33d62090867842211184f53bc23cf78b90a4c6709792639b74d1cf0d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:35Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:12Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:12 crc kubenswrapper[4787]: I0127 07:52:12.512155 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"60eef58e-b2eb-43d9-a499-317083a89ca3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://35e90c0899d9ae9ea7cce3f5904955f1f3a80e147efbd766e9ed3b34014f40df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://53dd18ffb0f39abccd8edc121977448f4c491f6be9acee866dde8cbeb7c52ab5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://af5452f3980a8a4614721cb60fffffb3b1f3f5d8d82928249857ed2dd3fb5986\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://879d855210b326ad3cbb964024c0b48b7373519deae8b974b9f5864416c15ef3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\
\":\\\"cri-o://879d855210b326ad3cbb964024c0b48b7373519deae8b974b9f5864416c15ef3\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"message\\\":\\\"le observer\\\\nW0127 07:51:54.984485 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0127 07:51:54.984680 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0127 07:51:54.985504 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2155430209/tls.crt::/tmp/serving-cert-2155430209/tls.key\\\\\\\"\\\\nI0127 07:51:55.207010 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0127 07:51:55.214027 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0127 07:51:55.214056 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0127 07:51:55.214085 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0127 07:51:55.214092 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0127 07:51:55.221404 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0127 07:51:55.221608 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0127 07:51:55.221697 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0127 07:51:55.221768 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0127 07:51:55.221825 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0127 07:51:55.221877 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0127 07:51:55.221926 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0127 07:51:55.221414 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0127 07:51:55.222961 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-27T07:51:49Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f3424e1f77a68fac6a8ccd12e018ce28850817c99e72cef7edbff0941124c46b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c37afd8d2562157729aca3685d7abae2bb6b9e081c2b456706dde875fd762c07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c37afd8d2562157729aca3685d7abae2bb6b9e081c2b456706dde875fd762c07\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:51:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:35Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:12Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:12 crc kubenswrapper[4787]: I0127 07:52:12.526022 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:12Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:12 crc kubenswrapper[4787]: I0127 07:52:12.539590 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-fghc7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6cac1bb0-6b6f-442f-9db4-a25d4f194bb1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://825d098d784c9e43aa1c5b9014dd290f1a23ddf0651dfd603c634682a8f05da2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c8zvj\\\",\\\"readOnly\\\":true,\\\"recu
rsiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:59Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-fghc7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:12Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:12 crc kubenswrapper[4787]: I0127 07:52:12.553797 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-rqwfc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fe88a860-6bb6-40ef-8ed7-c16f06fad8d3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f0dc7c2423b05f4eb857a5a6d8a5006cea055965d20a85e84659b369f9659eeb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-scmbf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:52:02Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-rqwfc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:12Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:12 crc kubenswrapper[4787]: I0127 07:52:12.571147 4787 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-multus/multus-additional-cni-plugins-fqb6r" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0ff88955-7cd9-4af4-9e0e-79614a1d2994\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://47c65fabdecb2c24a7eeabdff7a89339f711ff64e9cb6516900be57a2753f89b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5e7833b50fe96981aa0ed63aa03b7078def1c809e60092a901a8b7ebaee78ad8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5e7833b50fe96981aa0ed63aa03b7078def1c809e60092a901a8b7ebaee78ad8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:52:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a0b5a15ba402c52fc6f3cd5d882f597c90ea8d1ab4e836f0e0998a301126f5a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\
\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a0b5a15ba402c52fc6f3cd5d882f597c90ea8d1ab4e836f0e0998a301126f5a6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:52:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:52:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://82bb64425ded7611030cccfa7a439b14e545bf83bc48c762df144eddc7d08487\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://82bb64425ded7611030cccfa7a439b14e545bf83bc48c762df144eddc7d08487\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:52:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:52:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9782b2a339d2dba1498f28ed3f7e6c4c2e747a282465311592f0fdb5863f823\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f9782b2a339d2dba1498f28ed3f7e6c4c2e747a282465311592f0fdb5863f823\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:52:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:52:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\
\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c8924a3ef645447151d91c506d105c9760f0dce0c568e9bf37ea76108a6352b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c8924a3ef645447151d91c506d105c9760f0dce0c568e9bf37ea76108a6352b2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:52:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f0da2ec606517cc9e0c2ed1d549c947db2eecb603f9bc9ba8bddbee0aad4710\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0f0da2ec606517cc9e0c2ed1d549c947db2eecb603f9bc9ba8bddbee0aad4710\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:52:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:52:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-fqb6r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:12Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:12 crc kubenswrapper[4787]: I0127 07:52:12.593255 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7279787f-4f29-4eae-a213-b7ea5d579b93\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://486c1b541ff6adc6a3cdab348fbdd7fbcf1c202c1a474e5e5db37bbcfb38fe06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://47800c35810b19642ee37e41f20900bef93d843d1d7e2219547dde7bf88cc7dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://85684c09dd546fc9a972aa45da092b22d794a5588140a451becfbe962be82b76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0aa5c186d3ea5f41540e7450b9ee5a5bbfe8359
762ab9aa219a4e4bb26fb0a94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9717409dfdeb0d26d9a1c28a651d1feddfd4aaa11a9bc7db7206fbf8c6400c22\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b0fb183cca1d2f6f6f5bd9c80107e85fe0f5f39ea985a036ba115836e45d7cc2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b0fb183cca1d2f6f6f5bd9c80107e85fe0f5f39ea985a036ba115836e45d7cc2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:51:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a9bb746c11b2a67e014815e16d6765a1c86a3d2c156cae83853881bdb988507c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a9bb746c11b2a67e014815e16d6765a1c86a3d2c156cae83853881bdb988507c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://1fa78137ccc23e32f6eb838abf3991ca3d99b8349721f1419c892a7f8795f55f\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1fa78137ccc23e32f6eb838abf3991ca3d99b8349721f1419c892a7f8795f55f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:51:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:35Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:12Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:12 crc kubenswrapper[4787]: I0127 07:52:12.598974 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:12 crc kubenswrapper[4787]: I0127 07:52:12.599011 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:12 crc kubenswrapper[4787]: I0127 07:52:12.599024 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:12 crc kubenswrapper[4787]: I0127 07:52:12.599045 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:12 crc kubenswrapper[4787]: I0127 07:52:12.599061 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:12Z","lastTransitionTime":"2026-01-27T07:52:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:52:12 crc kubenswrapper[4787]: I0127 07:52:12.608150 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7a18f3e6327858658093c392b8b7de18ec6a1085c08ae31ae2f2dc371555011a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:12Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:12 crc kubenswrapper[4787]: I0127 07:52:12.622404 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:12Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:12 crc kubenswrapper[4787]: W0127 07:52:12.638511 4787 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod643aaef9_e302_436b_943e_940480ef74fc.slice/crio-13a1bc0eb4e46bce4afb0df853789c0fd5320c9f7e6ff4d9bbe7f61afa193a12 WatchSource:0}: Error finding container 13a1bc0eb4e46bce4afb0df853789c0fd5320c9f7e6ff4d9bbe7f61afa193a12: Status 404 returned error can't find the container with id 13a1bc0eb4e46bce4afb0df853789c0fd5320c9f7e6ff4d9bbe7f61afa193a12 Jan 27 07:52:12 crc kubenswrapper[4787]: I0127 07:52:12.638997 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:12Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:12 crc kubenswrapper[4787]: I0127 07:52:12.658219 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-rqjpz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e6f78168-0b0d-464d-b1c7-00bb9a69c0d1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e73805a73ffd2c2028fa682daaf20e295ecd2f2d7e24bb9b897883f18c3c514e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mo
untPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tz7b2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-rqjpz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:12Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:12 crc kubenswrapper[4787]: I0127 07:52:12.701575 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:12 crc kubenswrapper[4787]: I0127 07:52:12.701622 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:12 crc kubenswrapper[4787]: I0127 07:52:12.701632 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:12 crc kubenswrapper[4787]: I0127 07:52:12.701647 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:12 crc kubenswrapper[4787]: I0127 07:52:12.701658 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:12Z","lastTransitionTime":"2026-01-27T07:52:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:52:12 crc kubenswrapper[4787]: I0127 07:52:12.803936 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:12 crc kubenswrapper[4787]: I0127 07:52:12.803981 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:12 crc kubenswrapper[4787]: I0127 07:52:12.803991 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:12 crc kubenswrapper[4787]: I0127 07:52:12.804009 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:12 crc kubenswrapper[4787]: I0127 07:52:12.804019 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:12Z","lastTransitionTime":"2026-01-27T07:52:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:12 crc kubenswrapper[4787]: I0127 07:52:12.907491 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:12 crc kubenswrapper[4787]: I0127 07:52:12.907562 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:12 crc kubenswrapper[4787]: I0127 07:52:12.907575 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:12 crc kubenswrapper[4787]: I0127 07:52:12.907598 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:12 crc kubenswrapper[4787]: I0127 07:52:12.907612 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:12Z","lastTransitionTime":"2026-01-27T07:52:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:13 crc kubenswrapper[4787]: I0127 07:52:13.010243 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:13 crc kubenswrapper[4787]: I0127 07:52:13.010311 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:13 crc kubenswrapper[4787]: I0127 07:52:13.010326 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:13 crc kubenswrapper[4787]: I0127 07:52:13.010350 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:13 crc kubenswrapper[4787]: I0127 07:52:13.010365 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:13Z","lastTransitionTime":"2026-01-27T07:52:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:52:13 crc kubenswrapper[4787]: I0127 07:52:13.050618 4787 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-21 01:59:32.079129351 +0000 UTC Jan 27 07:52:13 crc kubenswrapper[4787]: I0127 07:52:13.076171 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 27 07:52:13 crc kubenswrapper[4787]: I0127 07:52:13.076270 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 27 07:52:13 crc kubenswrapper[4787]: E0127 07:52:13.076410 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 27 07:52:13 crc kubenswrapper[4787]: I0127 07:52:13.076444 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 27 07:52:13 crc kubenswrapper[4787]: E0127 07:52:13.076594 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 27 07:52:13 crc kubenswrapper[4787]: E0127 07:52:13.076766 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 27 07:52:13 crc kubenswrapper[4787]: I0127 07:52:13.113642 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:13 crc kubenswrapper[4787]: I0127 07:52:13.113697 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:13 crc kubenswrapper[4787]: I0127 07:52:13.113708 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:13 crc kubenswrapper[4787]: I0127 07:52:13.113726 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:13 crc kubenswrapper[4787]: I0127 07:52:13.113738 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:13Z","lastTransitionTime":"2026-01-27T07:52:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:52:13 crc kubenswrapper[4787]: I0127 07:52:13.217187 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:13 crc kubenswrapper[4787]: I0127 07:52:13.217239 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:13 crc kubenswrapper[4787]: I0127 07:52:13.217252 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:13 crc kubenswrapper[4787]: I0127 07:52:13.217274 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:13 crc kubenswrapper[4787]: I0127 07:52:13.217288 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:13Z","lastTransitionTime":"2026-01-27T07:52:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:13 crc kubenswrapper[4787]: I0127 07:52:13.296807 4787 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-7642m" Jan 27 07:52:13 crc kubenswrapper[4787]: I0127 07:52:13.320595 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:13 crc kubenswrapper[4787]: I0127 07:52:13.320645 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:13 crc kubenswrapper[4787]: I0127 07:52:13.320662 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:13 crc kubenswrapper[4787]: I0127 07:52:13.320688 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:13 crc kubenswrapper[4787]: I0127 07:52:13.320703 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:13Z","lastTransitionTime":"2026-01-27T07:52:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:52:13 crc kubenswrapper[4787]: I0127 07:52:13.416879 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/1.log" Jan 27 07:52:13 crc kubenswrapper[4787]: I0127 07:52:13.419723 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"215bbc991c576293e40d9ba239e697bfad702383af70483ddf0acea311b4ae28"} Jan 27 07:52:13 crc kubenswrapper[4787]: I0127 07:52:13.420168 4787 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 27 07:52:13 crc kubenswrapper[4787]: I0127 07:52:13.423042 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:13 crc kubenswrapper[4787]: I0127 07:52:13.423066 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:13 crc kubenswrapper[4787]: I0127 07:52:13.423082 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:13 crc kubenswrapper[4787]: I0127 07:52:13.423098 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:13 crc kubenswrapper[4787]: I0127 07:52:13.423108 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:13Z","lastTransitionTime":"2026-01-27T07:52:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:52:13 crc kubenswrapper[4787]: I0127 07:52:13.425267 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-vhss5" event={"ID":"643aaef9-e302-436b-943e-940480ef74fc","Type":"ContainerStarted","Data":"ea42ae75a943bc89609995381bc82366333131d7c7200a3ab758b9de239e3283"} Jan 27 07:52:13 crc kubenswrapper[4787]: I0127 07:52:13.425380 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-vhss5" event={"ID":"643aaef9-e302-436b-943e-940480ef74fc","Type":"ContainerStarted","Data":"5cda08199034af06aa5966ebd72a8a553fe0c83acf73bbd42f4f7d6bb121b2e7"} Jan 27 07:52:13 crc kubenswrapper[4787]: I0127 07:52:13.425409 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-vhss5" event={"ID":"643aaef9-e302-436b-943e-940480ef74fc","Type":"ContainerStarted","Data":"13a1bc0eb4e46bce4afb0df853789c0fd5320c9f7e6ff4d9bbe7f61afa193a12"} Jan 27 07:52:13 crc kubenswrapper[4787]: I0127 07:52:13.427700 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-7642m_fa44405c-042c-485a-ab6c-912dcd377751/ovnkube-controller/1.log" Jan 27 07:52:13 crc kubenswrapper[4787]: I0127 07:52:13.432144 4787 scope.go:117] "RemoveContainer" containerID="023dbda584b2664829844328d67f417bacaf8e4934d8ae66ea7b9928e95ac995" Jan 27 07:52:13 crc kubenswrapper[4787]: E0127 07:52:13.432335 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-7642m_openshift-ovn-kubernetes(fa44405c-042c-485a-ab6c-912dcd377751)\"" pod="openshift-ovn-kubernetes/ovnkube-node-7642m" podUID="fa44405c-042c-485a-ab6c-912dcd377751" Jan 27 07:52:13 crc kubenswrapper[4787]: I0127 07:52:13.433917 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1d5a64cdfd5f8aff93294f92aebc9ccf8fa2a4063e347fce2e35604d27651d9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:13Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:13 crc kubenswrapper[4787]: I0127 07:52:13.448570 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-q4fh5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f051e184-acac-47cf-9e04-9df648288715\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cf36b99a3389bdbbca8142539870671f6be61df22503df198f6255937e619950\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zvg7g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8829867d627dfed47b988a93a9ac3e381c0bf59794b65f8e1c1e821838477748\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zvg7g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:59Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-q4fh5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:13Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:13 crc kubenswrapper[4787]: I0127 07:52:13.466047 4787 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2974b5846d4b9e95a3eab849f160c5b33393ff6b1bf1275bc6476ec80807d632\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4426d84c4375c7c659c77049bbd6a720bf004fa5e7780b1e8739a2620c3667f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:13Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:13 crc kubenswrapper[4787]: I0127 07:52:13.481216 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:13Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:13 crc kubenswrapper[4787]: I0127 07:52:13.494406 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-fghc7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6cac1bb0-6b6f-442f-9db4-a25d4f194bb1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://825d098d784c9e43aa1c5b9014dd290f1a23ddf0651dfd603c634682a8f05da2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c8zvj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:59Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-fghc7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:13Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:13 crc kubenswrapper[4787]: I0127 07:52:13.505938 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-rqwfc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fe88a860-6bb6-40ef-8ed7-c16f06fad8d3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f0dc7c2423b05f4eb857a5a6d8a5006cea055965d20a85e84659b369f9659eeb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-scmbf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:52:02Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-rqwfc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:13Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:13 crc kubenswrapper[4787]: I0127 07:52:13.519175 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9bef4a57-904d-47b1-baa1-224c5c9f2b1a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://99c0fb5499a4bed619d003da60f83076e413fc5bda47a12d42770f567deeeec8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c114c2274358e777b240765cca81ea54a2e334002317a86595b7bfec95a11fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c470d3edd878f69c48f1457780557dd56300c0fe69342c479f1922dca856bceb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bba5d04b33d62090867842211184f53bc23cf78b90a4c6709792639b74d1cf0d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:35Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:13Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:13 crc kubenswrapper[4787]: I0127 07:52:13.525507 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:13 crc kubenswrapper[4787]: I0127 07:52:13.525595 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:13 crc kubenswrapper[4787]: I0127 07:52:13.525611 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:13 crc kubenswrapper[4787]: I0127 07:52:13.525635 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:13 crc kubenswrapper[4787]: I0127 07:52:13.525650 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:13Z","lastTransitionTime":"2026-01-27T07:52:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:52:13 crc kubenswrapper[4787]: I0127 07:52:13.534656 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"60eef58e-b2eb-43d9-a499-317083a89ca3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://35e90c0899d9ae9ea7cce3f5904955f1f3a80e147efbd766e9ed3b34014f40df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://53dd18ffb0f39abccd8edc121977448f4c491f6be9acee866dde8cbeb7c52ab5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://af5452f3980a8a4614721cb60fffffb3b1f3f5d8d82928249857ed2dd3fb5986\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartC
ount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://215bbc991c576293e40d9ba239e697bfad702383af70483ddf0acea311b4ae28\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://879d855210b326ad3cbb964024c0b48b7373519deae8b974b9f5864416c15ef3\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"message\\\":\\\"le observer\\\\nW0127 07:51:54.984485 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0127 07:51:54.984680 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0127 07:51:54.985504 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2155430209/tls.crt::/tmp/serving-cert-2155430209/tls.key\\\\\\\"\\\\nI0127 07:51:55.207010 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0127 07:51:55.214027 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0127 07:51:55.214056 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0127 07:51:55.214085 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0127 07:51:55.214092 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0127 07:51:55.221404 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0127 07:51:55.221608 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0127 07:51:55.221697 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0127 07:51:55.221768 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0127 07:51:55.221825 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0127 07:51:55.221877 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0127 07:51:55.221926 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0127 07:51:55.221414 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0127 07:51:55.222961 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-27T07:51:49Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f3424e1f77a68fac6a8ccd12e018ce28850817c99e72cef7edbff0941124c46b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c37afd8d2562157729aca3685d7abae2bb6b9e081c2b456706dde875fd762c07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c37afd8d2562157729aca3685d7abae2bb6b9e081c2b456706dde875fd762c07\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:51:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:35Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:13Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:13 crc kubenswrapper[4787]: I0127 07:52:13.549150 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:13Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:13 crc kubenswrapper[4787]: I0127 07:52:13.563382 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:13Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:13 crc kubenswrapper[4787]: I0127 07:52:13.576818 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-rqjpz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e6f78168-0b0d-464d-b1c7-00bb9a69c0d1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e73805a73ffd2c2028fa682daaf20e295ecd2f2d7e24bb9b897883f18c3c514e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mo
untPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tz7b2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-rqjpz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:13Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:13 crc kubenswrapper[4787]: I0127 07:52:13.586044 4787 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/network-metrics-daemon-vws75"] Jan 27 07:52:13 crc kubenswrapper[4787]: I0127 07:52:13.586956 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-vws75" Jan 27 07:52:13 crc kubenswrapper[4787]: E0127 07:52:13.587153 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-vws75" podUID="3969f21f-ab36-49b4-9a9c-02cf19e65ad0" Jan 27 07:52:13 crc kubenswrapper[4787]: I0127 07:52:13.597598 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-fqb6r" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0ff88955-7cd9-4af4-9e0e-79614a1d2994\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://47c65fabdecb2c24a7eeabdff7a89339f711ff64e9cb6516900be57a2753f89b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5e7833b50fe96981aa0ed63aa03b7078def1c809e60092a901a8b7ebaee78ad8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5e7833b50fe96981aa0ed63aa03b7078def1c809e60092a901a8b7ebaee78ad8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:52:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a0b5
a15ba402c52fc6f3cd5d882f597c90ea8d1ab4e836f0e0998a301126f5a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a0b5a15ba402c52fc6f3cd5d882f597c90ea8d1ab4e836f0e0998a301126f5a6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:52:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:52:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://82bb64425ded7611030cccfa7a439b14e545bf83bc48c762df144eddc7d08487\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://82bb64425ded7611030cccfa7a439b14e545bf83bc48c762df144eddc7d08487\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:52:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:52:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9782b2a339d2dba1498f28ed3f7e6c4c2e747a282465311592f0fdb5863f823\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f9782b2a339d2dba1498f28ed3f7e6c4c2e747a282465311592f0fdb5863f823\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:52:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:52:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\
\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c8924a3ef645447151d91c506d105c9760f0dce0c568e9bf37ea76108a6352b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c8924a3ef645447151d91c506d105c9760f0dce0c568e9bf37ea76108a6352b2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:52:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f0da2ec606517cc9e0c2ed1d549c947db2eecb603f9bc9ba8bddbee0aad4710\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0f0da2ec606517cc9e0c2ed1d549c947db2eecb603f9bc9ba8bddbee0aad4710\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:52:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:52:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-fqb6r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:13Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:13 crc kubenswrapper[4787]: I0127 07:52:13.620503 4787 
status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7279787f-4f29-4eae-a213-b7ea5d579b93\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://486c1b541ff6adc6a3cdab348fbdd7fbcf1c202c1a474e5e5db37bbcfb38fe06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://47800c35810b19642ee37e41f20900bef93d843d1d7e2219547dde7bf88cc7dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://85684c09dd546fc9a972aa45da092b22d794a5588140a451becfbe962be82b76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-
certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0aa5c186d3ea5f41540e7450b9ee5a5bbfe8359762ab9aa219a4e4bb26fb0a94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9717409dfdeb0d26d9a1c28a651d1feddfd4aaa11a9bc7db7206fbf8c6400c22\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b0fb183cca1d2f6f6f5bd9c80107e85fe0f5f39ea985a036ba115836e45d7cc2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b0fb183cca1d2f6f6f5bd9c80107e85fe0f5f39ea985a036ba115836e45d7cc2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:51:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a9bb746c11b2a67e014815e16d6765a1c86a3d2c156cae83853881bdb988507c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a9bb746c11b2a67e014815e16d6765a1c86a3d2c156cae83853881bdb988507c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:
51:37Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://1fa78137ccc23e32f6eb838abf3991ca3d99b8349721f1419c892a7f8795f55f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1fa78137ccc23e32f6eb838abf3991ca3d99b8349721f1419c892a7f8795f55f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:51:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:35Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:13Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:13 crc kubenswrapper[4787]: I0127 07:52:13.628611 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:13 crc kubenswrapper[4787]: I0127 07:52:13.628677 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:13 crc kubenswrapper[4787]: I0127 07:52:13.628691 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:13 crc kubenswrapper[4787]: I0127 07:52:13.628714 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:13 crc kubenswrapper[4787]: I0127 07:52:13.628730 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:13Z","lastTransitionTime":"2026-01-27T07:52:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:52:13 crc kubenswrapper[4787]: I0127 07:52:13.638079 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7a18f3e6327858658093c392b8b7de18ec6a1085c08ae31ae2f2dc371555011a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:13Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:13 crc kubenswrapper[4787]: I0127 07:52:13.651905 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-vhss5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"643aaef9-e302-436b-943e-940480ef74fc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:12Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy 
ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:12Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8q989\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8q989\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:52:12Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-vhss5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:13Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:13 crc kubenswrapper[4787]: I0127 07:52:13.675277 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7642m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fa44405c-042c-485a-ab6c-912dcd377751\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b5f54aa358ddf7d9fe76fd45c4e3332ba2ef1fa4da77c6f38ae29209c4339a80\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2abb29a77d0081b70495701e68f0a5a9b80656eab59ae8de10fab1450a1df49f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://108cdc389c2cbb9baeb538856e70ea1ca2bce5968eb4097700b3e71e5322258d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0bcc1a1d9f6e5bd0d8cf9b36441460debd839f92291531175eb05db5070136e8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fbc588f712cb0167029fc80f924f52841fb904738bbcf774c53ca15a2642ef9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc1a27dd4120a4eafd6bfbd908f9fdbc80e9b006afafb509399f2b657b129b2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://023dbda584b2664829844328d67f417bacaf8e4934d8ae66ea7b9928e95ac995\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6fc438285725e8717b0f840dbe429df899942e4ffb4df80a73d92b32cc4635b5\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-27T07:52:10Z\\\",\\\"message\\\":\\\"dler 7\\\\nI0127 07:52:09.988526 6153 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0127 07:52:09.988682 6153 reflector.go:311] Stopping reflector *v1.EgressFirewall (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressfirewall/v1/apis/informers/externalversions/factory.go:140\\\\nI0127 07:52:09.988716 6153 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0127 07:52:09.988908 6153 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0127 07:52:09.989091 6153 reflector.go:311] Stopping reflector *v1.EgressQoS (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressqos/v1/apis/informers/externalversions/factory.go:140\\\\nI0127 07:52:09.989155 6153 reflector.go:311] Stopping reflector *v1.AdminPolicyBasedExternalRoute (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/adminpolicybasedroute/v1/apis/informers/externalversions/factory.go:140\\\\nI0127 07:52:09.989230 6153 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0127 07:52:09.989309 6153 reflector.go:311] Stopping reflector *v1.EgressService (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressservice/v1/apis/informers/externalversions/f\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-27T07:52:06Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://023dbda584b2664829844328d67f417bacaf8e4934d8ae66ea7b9928e95ac995\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-27T07:52:12Z\\\",\\\"message\\\":\\\"80c50373a5e91f08c5893365bfd5a5040449b1b6585a23 prometheus.io/scheme:https prometheus.io/scrape:true service.alpha.openshift.io/serving-cert-secret-name:serving-cert service.alpha.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168 service.beta.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168] [] [] []},Spec:ServiceSpec{Ports:[]ServicePort{ServicePort{Name:https,Protocol:TCP,Port:443,TargetPort:{0 8443 },NodePort:0,AppProtocol:nil,},},Selector:map[string]string{apiserver: 
true,},ClusterIP:10.217.4.140,Type:ClusterIP,ExternalIPs:[],SessionAffinity:None,LoadBalancerIP:,LoadBalancerSourceRanges:[],ExternalName:,ExternalTrafficPolicy:,HealthCheckNodePort:0,PublishNotReadyAddresses:false,SessionAffinityConfig:nil,IPFamilyPolicy:*SingleStack,ClusterIPs:[10.217.4.140],IPFamilies:[IPv4],AllocateLoadBalancerNodePorts:nil,LoadBalancerClass:nil,InternalTrafficPolicy:*Cluster,TrafficDistribution:nil,},Status:ServiceStatus{LoadBalancer:LoadBalancerStatus{Ingress:[]LoadBalancerIngress{},},Conditions:[]Condition{},},}\\\\nI0127 07:52:11.779634 6307 services_controller.go:356] Processing sync for service openshift-service-ca-operator/metrics for network=default\\\\nF0127 07:52:11.779647 6307 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network contro\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-27T07:52:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d46b5c512f7b14d97a53ce0355a7ceb08370d3e91ccef003b40386a9785df413\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-l
ib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://199ad62dee1b7ba9f8af9b2f3fcd77db94e8ba7dbfe2d011610c47358c17126b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://199ad62dee1b7ba9f8af9b2f3fcd77db94e8ba7dbfe2d011610c47358c17126b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:52:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:59Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-7642m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:13Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:13 crc kubenswrapper[4787]: I0127 07:52:13.684502 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/3969f21f-ab36-49b4-9a9c-02cf19e65ad0-metrics-certs\") pod \"network-metrics-daemon-vws75\" (UID: \"3969f21f-ab36-49b4-9a9c-02cf19e65ad0\") " pod="openshift-multus/network-metrics-daemon-vws75" Jan 27 07:52:13 crc kubenswrapper[4787]: I0127 07:52:13.684588 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sq2mg\" (UniqueName: \"kubernetes.io/projected/3969f21f-ab36-49b4-9a9c-02cf19e65ad0-kube-api-access-sq2mg\") pod \"network-metrics-daemon-vws75\" (UID: \"3969f21f-ab36-49b4-9a9c-02cf19e65ad0\") " pod="openshift-multus/network-metrics-daemon-vws75" Jan 27 07:52:13 crc kubenswrapper[4787]: I0127 07:52:13.690904 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-q4fh5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f051e184-acac-47cf-9e04-9df648288715\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cf36b99a3389bdbbca8142539870671f6be61df22503df198f6255937e619950\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zvg7g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8829867d627dfed47b988a93a9ac3e381c0bf59794b65f8e1c1e821838477748\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zvg7g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:59Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-q4fh5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:13Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:13 crc kubenswrapper[4787]: I0127 07:52:13.706078 4787 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2974b5846d4b9e95a3eab849f160c5b33393ff6b1bf1275bc6476ec80807d632\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4426d84c4375c7c659c77049bbd6a720bf004fa5e7780b1e8739a2620c3667f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:13Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:13 crc kubenswrapper[4787]: I0127 07:52:13.719981 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1d5a64cdfd5f8aff93294f92aebc9ccf8fa2a4063e347fce2e35604d27651d9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:13Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:13 crc kubenswrapper[4787]: I0127 07:52:13.732242 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:13 crc kubenswrapper[4787]: I0127 07:52:13.732292 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:13 crc kubenswrapper[4787]: I0127 07:52:13.732304 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:13 crc kubenswrapper[4787]: I0127 07:52:13.732325 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:13 crc kubenswrapper[4787]: I0127 07:52:13.732338 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:13Z","lastTransitionTime":"2026-01-27T07:52:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:52:13 crc kubenswrapper[4787]: I0127 07:52:13.733856 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-fghc7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6cac1bb0-6b6f-442f-9db4-a25d4f194bb1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://825d098d784c9e43aa1c5b9014dd290f1a23ddf0651dfd603c634682a8f05da2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c8zvj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:59Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-fghc7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:13Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:13 crc kubenswrapper[4787]: I0127 07:52:13.747199 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-rqwfc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fe88a860-6bb6-40ef-8ed7-c16f06fad8d3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f0dc7c2423b05f4eb857a5a6d8a5006cea055965d20a85e84659b369f9659eeb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-scmbf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:52:02Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-rqwfc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:13Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:13 crc kubenswrapper[4787]: I0127 07:52:13.766722 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9bef4a57-904d-47b1-baa1-224c5c9f2b1a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://99c0fb5499a4bed619d003da60f83076e413fc5bda47a12d42770f567deeeec8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c114c2274358e777b240765cca81ea54a2e334002317a86595b7bfec95a11fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c470d3edd878f69c48f1457780557dd56300c0fe69342c479f1922dca856bceb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bba5d04b33d62090867842211184f53bc23cf78b90a4c6709792639b74d1cf0d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:35Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:13Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:13 crc kubenswrapper[4787]: I0127 07:52:13.780930 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"60eef58e-b2eb-43d9-a499-317083a89ca3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://35e90c0899d9ae9ea7cce3f5904955f1f3a80e147efbd766e9ed3b34014f40df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://53dd18ffb0f39abccd8edc121977448f4c491f6be9acee866dde8cbeb7c52ab5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://af5452f3980a8a4614721cb60fffffb3b1f3f5d8d82928249857ed2dd3fb5986\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://215bbc991c576293e40d9ba239e697bfad702383af70483ddf0acea311b4ae28\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://879d855210b326ad3cbb964024c0b48b7373519deae8b974b9f5864416c15ef3\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"message\\\":\\\"le observer\\\\nW0127 07:51:54.984485 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0127 07:51:54.984680 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0127 07:51:54.985504 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2155430209/tls.crt::/tmp/serving-cert-2155430209/tls.key\\\\\\\"\\\\nI0127 07:51:55.207010 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0127 07:51:55.214027 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0127 07:51:55.214056 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0127 07:51:55.214085 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0127 07:51:55.214092 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0127 07:51:55.221404 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0127 07:51:55.221608 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0127 07:51:55.221697 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0127 07:51:55.221768 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0127 07:51:55.221825 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0127 07:51:55.221877 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0127 07:51:55.221926 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0127 07:51:55.221414 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0127 07:51:55.222961 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-27T07:51:49Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f3424e1f77a68fac6a8ccd12e018ce28850817c99e72cef7edbff0941124c46b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c37afd8d2562157729aca3685d7abae2bb6b9e081c2b456706dde875fd762c07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c37afd8d2562157729aca3685d7abae2bb6b9e081c2b456706dde875fd762c07\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:51:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:35Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:13Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:13 crc kubenswrapper[4787]: I0127 07:52:13.785215 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/3969f21f-ab36-49b4-9a9c-02cf19e65ad0-metrics-certs\") pod \"network-metrics-daemon-vws75\" (UID: \"3969f21f-ab36-49b4-9a9c-02cf19e65ad0\") " pod="openshift-multus/network-metrics-daemon-vws75" Jan 27 07:52:13 crc kubenswrapper[4787]: I0127 07:52:13.785282 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sq2mg\" (UniqueName: \"kubernetes.io/projected/3969f21f-ab36-49b4-9a9c-02cf19e65ad0-kube-api-access-sq2mg\") pod \"network-metrics-daemon-vws75\" (UID: \"3969f21f-ab36-49b4-9a9c-02cf19e65ad0\") " pod="openshift-multus/network-metrics-daemon-vws75" Jan 27 07:52:13 crc kubenswrapper[4787]: E0127 
07:52:13.785423 4787 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Jan 27 07:52:13 crc kubenswrapper[4787]: E0127 07:52:13.785499 4787 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/3969f21f-ab36-49b4-9a9c-02cf19e65ad0-metrics-certs podName:3969f21f-ab36-49b4-9a9c-02cf19e65ad0 nodeName:}" failed. No retries permitted until 2026-01-27 07:52:14.285478108 +0000 UTC m=+39.937833600 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/3969f21f-ab36-49b4-9a9c-02cf19e65ad0-metrics-certs") pod "network-metrics-daemon-vws75" (UID: "3969f21f-ab36-49b4-9a9c-02cf19e65ad0") : object "openshift-multus"/"metrics-daemon-secret" not registered Jan 27 07:52:13 crc kubenswrapper[4787]: I0127 07:52:13.794293 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:13Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:13 crc kubenswrapper[4787]: I0127 07:52:13.803731 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sq2mg\" (UniqueName: \"kubernetes.io/projected/3969f21f-ab36-49b4-9a9c-02cf19e65ad0-kube-api-access-sq2mg\") pod \"network-metrics-daemon-vws75\" (UID: \"3969f21f-ab36-49b4-9a9c-02cf19e65ad0\") " pod="openshift-multus/network-metrics-daemon-vws75" Jan 27 07:52:13 crc kubenswrapper[4787]: I0127 07:52:13.808765 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:13Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:13 crc kubenswrapper[4787]: I0127 07:52:13.830785 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-rqjpz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e6f78168-0b0d-464d-b1c7-00bb9a69c0d1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e73805a73ffd2c2028fa682daaf20e295ecd2f2d7e24bb9b897883f18c3c514e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mo
untPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tz7b2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-rqjpz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:13Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:13 crc kubenswrapper[4787]: I0127 07:52:13.835774 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:13 crc kubenswrapper[4787]: I0127 07:52:13.835810 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:13 crc kubenswrapper[4787]: I0127 07:52:13.835822 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:13 crc kubenswrapper[4787]: I0127 07:52:13.835841 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:13 crc kubenswrapper[4787]: I0127 07:52:13.835856 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:13Z","lastTransitionTime":"2026-01-27T07:52:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:52:13 crc kubenswrapper[4787]: I0127 07:52:13.847920 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-fqb6r" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0ff88955-7cd9-4af4-9e0e-79614a1d2994\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://47c65fabdecb2c24a7eeabdff7a89339f711ff64e9cb6516900be57a2753f89b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5e7833b50fe96981aa0ed63aa03b7078def1c809e60092a901a8b7ebaee78ad8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5e7833b50fe96981aa0ed63aa03b7078def1c809e60092a901a8b7ebaee78ad8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:52:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a0b5a15ba402c52fc6f3cd5d882f597c90ea8d1ab4e836f0e0998a301126f5a6\
\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a0b5a15ba402c52fc6f3cd5d882f597c90ea8d1ab4e836f0e0998a301126f5a6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:52:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:52:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://82bb64425ded7611030cccfa7a439b14e545bf83bc48c762df144eddc7d08487\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://82bb64425ded7611030cccfa7a439b14e545bf83bc48c762df144eddc7d08487\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:52:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:52:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9782b2a339d2dba1498f28ed3f7e6c4c2e747a282465311592f0fdb5863f823\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f9782b2a339d2dba1498f28ed3f7e6c4c2e747a282465311592f0fdb5863f823\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:52:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:52:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"
mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c8924a3ef645447151d91c506d105c9760f0dce0c568e9bf37ea76108a6352b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c8924a3ef645447151d91c506d105c9760f0dce0c568e9bf37ea76108a6352b2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:52:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f0da2ec606517cc9e0c2ed1d549c947db2eecb603f9bc9ba8bddbee0aad4710\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0f0da2ec606517cc9e0c2ed1d549c947db2eecb603f9bc9ba8bddbee0aad4710\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:52:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:52:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-fqb6r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:13Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:13 crc kubenswrapper[4787]: I0127 07:52:13.893980 4787 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7279787f-4f29-4eae-a213-b7ea5d579b93\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://486c1b541ff6adc6a3cdab348fbdd7fbcf1c202c1a474e5e5db37bbcfb38fe06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://47800c35810b19642ee37e41f20900bef93d843d1d7e2219547dde7bf88cc7dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://85684c09dd546fc9a972aa45da092b22d794a5588140a451becfbe962be82b76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"container
ID\\\":\\\"cri-o://0aa5c186d3ea5f41540e7450b9ee5a5bbfe8359762ab9aa219a4e4bb26fb0a94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9717409dfdeb0d26d9a1c28a651d1feddfd4aaa11a9bc7db7206fbf8c6400c22\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b0fb183cca1d2f6f6f5bd9c80107e85fe0f5f39ea985a036ba115836e45d7cc2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b0fb183cca1d2f6f6f5bd9c80107e85fe0f5f39ea985a036ba115836e45d7cc2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:51:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a9bb746c11b2a67e014815e16d6765a1c86a3d2c156cae83853881bdb988507c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a9bb746c11b2a67e014815e16d6765a1c86a3d2c156cae83853881bdb988507c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://1fa78137cc
c23e32f6eb838abf3991ca3d99b8349721f1419c892a7f8795f55f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1fa78137ccc23e32f6eb838abf3991ca3d99b8349721f1419c892a7f8795f55f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:51:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:35Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:13Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:13 crc kubenswrapper[4787]: I0127 07:52:13.918238 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7a18f3e6327858658093c392b8b7de18ec6a1085c08ae31ae2f2dc371555011a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod 
\"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:13Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:13 crc kubenswrapper[4787]: I0127 07:52:13.938958 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:13 crc kubenswrapper[4787]: I0127 07:52:13.939008 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:13 crc kubenswrapper[4787]: I0127 07:52:13.939020 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:13 crc kubenswrapper[4787]: I0127 07:52:13.939041 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:13 crc kubenswrapper[4787]: I0127 07:52:13.939054 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:13Z","lastTransitionTime":"2026-01-27T07:52:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:13 crc kubenswrapper[4787]: I0127 07:52:13.939280 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:13Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:13 crc kubenswrapper[4787]: I0127 07:52:13.950307 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-vws75" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3969f21f-ab36-49b4-9a9c-02cf19e65ad0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:13Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:13Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:13Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq2mg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq2mg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:52:13Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-vws75\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:13Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:13 crc kubenswrapper[4787]: I0127 07:52:13.969342 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7642m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fa44405c-042c-485a-ab6c-912dcd377751\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b5f54aa358ddf7d9fe76fd45c4e3332ba2ef1fa4da77c6f38ae29209c4339a80\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2abb29a77d0081b70495701e68f0a5a9b80656eab59ae8de10fab1450a1df49f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://108cdc389c2cbb9baeb538856e70ea1ca2bce5968eb4097700b3e71e5322258d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0bcc1a1d9f6e5bd0d8cf9b36441460debd839f92291531175eb05db5070136e8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fbc588f712cb0167029fc80f924f52841fb904738bbcf774c53ca15a2642ef9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc1a27dd4120a4eafd6bfbd908f9fdbc80e9b006afafb509399f2b657b129b2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://023dbda584b2664829844328d67f417bacaf8e49
34d8ae66ea7b9928e95ac995\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://023dbda584b2664829844328d67f417bacaf8e4934d8ae66ea7b9928e95ac995\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-27T07:52:12Z\\\",\\\"message\\\":\\\"80c50373a5e91f08c5893365bfd5a5040449b1b6585a23 prometheus.io/scheme:https prometheus.io/scrape:true service.alpha.openshift.io/serving-cert-secret-name:serving-cert service.alpha.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168 service.beta.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168] [] [] []},Spec:ServiceSpec{Ports:[]ServicePort{ServicePort{Name:https,Protocol:TCP,Port:443,TargetPort:{0 8443 },NodePort:0,AppProtocol:nil,},},Selector:map[string]string{apiserver: true,},ClusterIP:10.217.4.140,Type:ClusterIP,ExternalIPs:[],SessionAffinity:None,LoadBalancerIP:,LoadBalancerSourceRanges:[],ExternalName:,ExternalTrafficPolicy:,HealthCheckNodePort:0,PublishNotReadyAddresses:false,SessionAffinityConfig:nil,IPFamilyPolicy:*SingleStack,ClusterIPs:[10.217.4.140],IPFamilies:[IPv4],AllocateLoadBalancerNodePorts:nil,LoadBalancerClass:nil,InternalTrafficPolicy:*Cluster,TrafficDistribution:nil,},Status:ServiceStatus{LoadBalancer:LoadBalancerStatus{Ingress:[]LoadBalancerIngress{},},Conditions:[]Condition{},},}\\\\nI0127 07:52:11.779634 6307 services_controller.go:356] Processing sync for service openshift-service-ca-operator/metrics for network=default\\\\nF0127 07:52:11.779647 6307 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network contro\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-27T07:52:10Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-7642m_openshift-ovn-kubernetes(fa44405c-042c-485a-ab6c-912dcd377751)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d46b5c512f7b14d97a53ce0355a7ceb08370d3e91ccef003b40386a9785df413\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://199ad62dee1b7ba9f8af9b2f3fcd77db94e8ba7dbfe2d011610c47358c17126b\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://199ad62dee1b7ba9f8af9b2f3fcd77db94e8ba7dbfe2d011610c47358c17126b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:52:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:59Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-7642m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:13Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:13 crc kubenswrapper[4787]: I0127 07:52:13.982975 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-vhss5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"643aaef9-e302-436b-943e-940480ef74fc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5cda08199034af06aa5966ebd72a8a553fe0c83acf73bbd42f4f7d6bb121b2e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8q989
\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ea42ae75a943bc89609995381bc82366333131d7c7200a3ab758b9de239e3283\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8q989\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:52:12Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-vhss5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:13Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:14 crc kubenswrapper[4787]: I0127 07:52:14.042008 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:14 crc kubenswrapper[4787]: I0127 07:52:14.042070 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:14 crc kubenswrapper[4787]: I0127 07:52:14.042087 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:14 crc kubenswrapper[4787]: I0127 07:52:14.042112 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:14 crc kubenswrapper[4787]: I0127 07:52:14.042131 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:14Z","lastTransitionTime":"2026-01-27T07:52:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:52:14 crc kubenswrapper[4787]: I0127 07:52:14.051424 4787 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-12 06:28:52.946399306 +0000 UTC Jan 27 07:52:14 crc kubenswrapper[4787]: I0127 07:52:14.145527 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:14 crc kubenswrapper[4787]: I0127 07:52:14.145613 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:14 crc kubenswrapper[4787]: I0127 07:52:14.145633 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:14 crc kubenswrapper[4787]: I0127 07:52:14.145660 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:14 crc kubenswrapper[4787]: I0127 07:52:14.145676 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:14Z","lastTransitionTime":"2026-01-27T07:52:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:14 crc kubenswrapper[4787]: I0127 07:52:14.248798 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:14 crc kubenswrapper[4787]: I0127 07:52:14.248881 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:14 crc kubenswrapper[4787]: I0127 07:52:14.248905 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:14 crc kubenswrapper[4787]: I0127 07:52:14.248939 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:14 crc kubenswrapper[4787]: I0127 07:52:14.248966 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:14Z","lastTransitionTime":"2026-01-27T07:52:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:14 crc kubenswrapper[4787]: I0127 07:52:14.290761 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/3969f21f-ab36-49b4-9a9c-02cf19e65ad0-metrics-certs\") pod \"network-metrics-daemon-vws75\" (UID: \"3969f21f-ab36-49b4-9a9c-02cf19e65ad0\") " pod="openshift-multus/network-metrics-daemon-vws75" Jan 27 07:52:14 crc kubenswrapper[4787]: E0127 07:52:14.290928 4787 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Jan 27 07:52:14 crc kubenswrapper[4787]: E0127 07:52:14.291009 4787 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/3969f21f-ab36-49b4-9a9c-02cf19e65ad0-metrics-certs podName:3969f21f-ab36-49b4-9a9c-02cf19e65ad0 nodeName:}" failed. 
No retries permitted until 2026-01-27 07:52:15.290984458 +0000 UTC m=+40.943339950 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/3969f21f-ab36-49b4-9a9c-02cf19e65ad0-metrics-certs") pod "network-metrics-daemon-vws75" (UID: "3969f21f-ab36-49b4-9a9c-02cf19e65ad0") : object "openshift-multus"/"metrics-daemon-secret" not registered Jan 27 07:52:14 crc kubenswrapper[4787]: I0127 07:52:14.352161 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:14 crc kubenswrapper[4787]: I0127 07:52:14.352269 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:14 crc kubenswrapper[4787]: I0127 07:52:14.352291 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:14 crc kubenswrapper[4787]: I0127 07:52:14.352320 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:14 crc kubenswrapper[4787]: I0127 07:52:14.352341 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:14Z","lastTransitionTime":"2026-01-27T07:52:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:14 crc kubenswrapper[4787]: I0127 07:52:14.455760 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:14 crc kubenswrapper[4787]: I0127 07:52:14.456402 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:14 crc kubenswrapper[4787]: I0127 07:52:14.456419 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:14 crc kubenswrapper[4787]: I0127 07:52:14.456449 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:14 crc kubenswrapper[4787]: I0127 07:52:14.456466 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:14Z","lastTransitionTime":"2026-01-27T07:52:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:52:14 crc kubenswrapper[4787]: I0127 07:52:14.559729 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:14 crc kubenswrapper[4787]: I0127 07:52:14.559784 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:14 crc kubenswrapper[4787]: I0127 07:52:14.559794 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:14 crc kubenswrapper[4787]: I0127 07:52:14.559816 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:14 crc kubenswrapper[4787]: I0127 07:52:14.559829 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:14Z","lastTransitionTime":"2026-01-27T07:52:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:14 crc kubenswrapper[4787]: I0127 07:52:14.663456 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:14 crc kubenswrapper[4787]: I0127 07:52:14.663500 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:14 crc kubenswrapper[4787]: I0127 07:52:14.663510 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:14 crc kubenswrapper[4787]: I0127 07:52:14.663527 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:14 crc kubenswrapper[4787]: I0127 07:52:14.663540 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:14Z","lastTransitionTime":"2026-01-27T07:52:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:14 crc kubenswrapper[4787]: I0127 07:52:14.766717 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:14 crc kubenswrapper[4787]: I0127 07:52:14.766800 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:14 crc kubenswrapper[4787]: I0127 07:52:14.766817 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:14 crc kubenswrapper[4787]: I0127 07:52:14.766844 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:14 crc kubenswrapper[4787]: I0127 07:52:14.766860 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:14Z","lastTransitionTime":"2026-01-27T07:52:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:52:14 crc kubenswrapper[4787]: I0127 07:52:14.870786 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:14 crc kubenswrapper[4787]: I0127 07:52:14.870847 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:14 crc kubenswrapper[4787]: I0127 07:52:14.870862 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:14 crc kubenswrapper[4787]: I0127 07:52:14.870883 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:14 crc kubenswrapper[4787]: I0127 07:52:14.870897 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:14Z","lastTransitionTime":"2026-01-27T07:52:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:14 crc kubenswrapper[4787]: I0127 07:52:14.973954 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:14 crc kubenswrapper[4787]: I0127 07:52:14.974005 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:14 crc kubenswrapper[4787]: I0127 07:52:14.974018 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:14 crc kubenswrapper[4787]: I0127 07:52:14.974039 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:14 crc kubenswrapper[4787]: I0127 07:52:14.974051 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:14Z","lastTransitionTime":"2026-01-27T07:52:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:15 crc kubenswrapper[4787]: I0127 07:52:15.051746 4787 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-21 09:24:59.099095412 +0000 UTC Jan 27 07:52:15 crc kubenswrapper[4787]: I0127 07:52:15.075921 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 27 07:52:15 crc kubenswrapper[4787]: I0127 07:52:15.076044 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 27 07:52:15 crc kubenswrapper[4787]: E0127 07:52:15.076110 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 27 07:52:15 crc kubenswrapper[4787]: I0127 07:52:15.076213 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 27 07:52:15 crc kubenswrapper[4787]: E0127 07:52:15.076256 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 27 07:52:15 crc kubenswrapper[4787]: I0127 07:52:15.076345 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-vws75" Jan 27 07:52:15 crc kubenswrapper[4787]: E0127 07:52:15.076615 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 27 07:52:15 crc kubenswrapper[4787]: E0127 07:52:15.076787 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-vws75" podUID="3969f21f-ab36-49b4-9a9c-02cf19e65ad0" Jan 27 07:52:15 crc kubenswrapper[4787]: I0127 07:52:15.077545 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:15 crc kubenswrapper[4787]: I0127 07:52:15.077660 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:15 crc kubenswrapper[4787]: I0127 07:52:15.077686 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:15 crc kubenswrapper[4787]: I0127 07:52:15.077725 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:15 crc kubenswrapper[4787]: I0127 07:52:15.077753 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:15Z","lastTransitionTime":"2026-01-27T07:52:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:52:15 crc kubenswrapper[4787]: I0127 07:52:15.096908 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-vws75" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3969f21f-ab36-49b4-9a9c-02cf19e65ad0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:13Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:13Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:13Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq2mg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq2mg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:52:13Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-vws75\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:15Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:15 crc kubenswrapper[4787]: I0127 07:52:15.122999 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7642m" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fa44405c-042c-485a-ab6c-912dcd377751\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b5f54aa358ddf7d9fe76fd45c4e3332ba2ef1fa4da77c6f38ae29209c4339a80\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2abb29a77d0081b70495701e68f0a5a9b80656eab59ae8de10fab1450a1df49f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://108cdc389c2cbb9baeb538856e70ea1ca2bce5968eb4097700b3e71e5322258d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0bcc1a1d9f6e5bd0d8cf9b36441460debd839f92291531175eb05db5070136e8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fbc588f712cb0167029fc80f924f52841fb904738bbcf774c53ca15a2642ef9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc1a27dd4120a4eafd6bfbd908f9fdbc80e9b006afafb509399f2b657b129b2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://023dbda584b2664829844328d67f417bacaf8e4934d8ae66ea7b9928e95ac995\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://023dbda584b2664829844328d67f417bacaf8e4934d8ae66ea7b9928e95ac995\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-27T07:52:12Z\\\",\\\"message\\\":\\\"80c50373a5e91f08c5893365bfd5a5040449b1b6585a23 prometheus.io/scheme:https prometheus.io/scrape:true service.alpha.openshift.io/serving-cert-secret-name:serving-cert service.alpha.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168 service.beta.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168] [] [] []},Spec:ServiceSpec{Ports:[]ServicePort{ServicePort{Name:https,Protocol:TCP,Port:443,TargetPort:{0 8443 },NodePort:0,AppProtocol:nil,},},Selector:map[string]string{apiserver: true,},ClusterIP:10.217.4.140,Type:ClusterIP,ExternalIPs:[],SessionAffinity:None,LoadBalancerIP:,LoadBalancerSourceRanges:[],ExternalName:,ExternalTrafficPolicy:,HealthCheckNodePort:0,PublishNotReadyAddresses:false,SessionAffinityConfig:nil,IPFamilyPolicy:*SingleStack,ClusterIPs:[10.217.4.140],IPFamilies:[IPv4],AllocateLoadBalancerNodePorts:nil,LoadBalancerClass:nil,InternalTrafficPolicy:*Cluster,TrafficDistribution:nil,},Status:ServiceStatus{LoadBalancer:LoadBalancerStatus{Ingress:[]LoadBalancerIngress{},},Conditions:[]Condition{},},}\\\\nI0127 07:52:11.779634 6307 services_controller.go:356] Processing sync for service openshift-service-ca-operator/metrics for network=default\\\\nF0127 07:52:11.779647 6307 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network contro\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-27T07:52:10Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-7642m_openshift-ovn-kubernetes(fa44405c-042c-485a-ab6c-912dcd377751)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d46b5c512f7b14d97a53ce0355a7ceb08370d3e91ccef003b40386a9785df413\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://199ad62dee1b7ba9f8af9b2f3fcd77db94e8ba7dbfe2d011610c47358c17126b\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://199ad62dee1b7ba9f8af9b2f3fcd77db94e8ba7dbfe2d011610c47358c17126b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:52:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:59Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-7642m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:15Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:15 crc kubenswrapper[4787]: I0127 07:52:15.136624 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-vhss5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"643aaef9-e302-436b-943e-940480ef74fc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5cda08199034af06aa5966ebd72a8a553fe0c83acf73bbd42f4f7d6bb121b2e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8q989
\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ea42ae75a943bc89609995381bc82366333131d7c7200a3ab758b9de239e3283\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8q989\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:52:12Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-vhss5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:15Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:15 crc kubenswrapper[4787]: I0127 07:52:15.149639 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-q4fh5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f051e184-acac-47cf-9e04-9df648288715\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cf36b99a3389bdbbca8142539870671f6be61df22503df198f6255937e619950\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zvg7g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8829867d627dfed47b988a93a9ac3e381c0bf59794b65f8e1c1e821838477748\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zvg7g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:59Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-q4fh5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:15Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:15 crc kubenswrapper[4787]: I0127 07:52:15.166641 4787 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2974b5846d4b9e95a3eab849f160c5b33393ff6b1bf1275bc6476ec80807d632\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4426d84c4375c7c659c77049bbd6a720bf004fa5e7780b1e8739a2620c3667f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:15Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:15 crc kubenswrapper[4787]: I0127 07:52:15.181784 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:15 crc kubenswrapper[4787]: I0127 07:52:15.181847 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:15 
crc kubenswrapper[4787]: I0127 07:52:15.181868 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:15 crc kubenswrapper[4787]: I0127 07:52:15.181893 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:15 crc kubenswrapper[4787]: I0127 07:52:15.181909 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:15Z","lastTransitionTime":"2026-01-27T07:52:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:15 crc kubenswrapper[4787]: I0127 07:52:15.182405 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1d5a64cdfd5f8aff93294f92aebc9ccf8fa2a4063e347fce2e35604d27651d9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:15Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:15 crc kubenswrapper[4787]: I0127 07:52:15.199545 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-fghc7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6cac1bb0-6b6f-442f-9db4-a25d4f194bb1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://825d098d784c9e43aa1c5b9014dd290f1a23ddf0651dfd603c634682a8f05da2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c8zvj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:59Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-fghc7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:15Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:15 crc kubenswrapper[4787]: I0127 07:52:15.210303 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-rqwfc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fe88a860-6bb6-40ef-8ed7-c16f06fad8d3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f0dc7c2423b05f4eb857a5a6d8a5006cea055965d20a85e84659b369f9659eeb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-scmbf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:52:02Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-rqwfc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:15Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:15 crc kubenswrapper[4787]: I0127 07:52:15.225168 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9bef4a57-904d-47b1-baa1-224c5c9f2b1a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://99c0fb5499a4bed619d003da60f83076e413fc5bda47a12d42770f567deeeec8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c114c2274358e777b240765cca81ea54a2e334002317a86595b7bfec95a11fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c470d3edd878f69c48f1457780557dd56300c0fe69342c479f1922dca856bceb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bba5d04b33d62090867842211184f53bc23cf78b90a4c6709792639b74d1cf0d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:35Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:15Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:15 crc kubenswrapper[4787]: I0127 07:52:15.239502 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"60eef58e-b2eb-43d9-a499-317083a89ca3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://35e90c0899d9ae9ea7cce3f5904955f1f3a80e147efbd766e9ed3b34014f40df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://53dd18ffb0f39abccd8edc121977448f4c491f6be9acee866dde8cbeb7c52ab5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://af5452f3980a8a4614721cb60fffffb3b1f3f5d8d82928249857ed2dd3fb5986\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://215bbc991c576293e40d9ba239e697bfad702383af70483ddf0acea311b4ae28\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://879d855210b326ad3cbb964024c0b48b7373519deae8b974b9f5864416c15ef3\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"message\\\":\\\"le observer\\\\nW0127 07:51:54.984485 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0127 07:51:54.984680 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0127 07:51:54.985504 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2155430209/tls.crt::/tmp/serving-cert-2155430209/tls.key\\\\\\\"\\\\nI0127 07:51:55.207010 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0127 07:51:55.214027 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0127 07:51:55.214056 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0127 07:51:55.214085 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0127 07:51:55.214092 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0127 07:51:55.221404 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0127 07:51:55.221608 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0127 07:51:55.221697 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0127 07:51:55.221768 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0127 07:51:55.221825 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0127 07:51:55.221877 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0127 07:51:55.221926 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0127 07:51:55.221414 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0127 07:51:55.222961 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-27T07:51:49Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f3424e1f77a68fac6a8ccd12e018ce28850817c99e72cef7edbff0941124c46b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c37afd8d2562157729aca3685d7abae2bb6b9e081c2b456706dde875fd762c07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c37afd8d2562157729aca3685d7abae2bb6b9e081c2b456706dde875fd762c07\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:51:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:35Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:15Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:15 crc kubenswrapper[4787]: I0127 07:52:15.256876 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:15Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:15 crc kubenswrapper[4787]: I0127 07:52:15.274321 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:15Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:15 crc kubenswrapper[4787]: I0127 07:52:15.286799 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:15 crc kubenswrapper[4787]: I0127 07:52:15.286849 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:15 crc kubenswrapper[4787]: I0127 07:52:15.286860 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:15 crc kubenswrapper[4787]: I0127 07:52:15.286882 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:15 crc kubenswrapper[4787]: I0127 07:52:15.286897 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:15Z","lastTransitionTime":"2026-01-27T07:52:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:52:15 crc kubenswrapper[4787]: I0127 07:52:15.287215 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-rqjpz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e6f78168-0b0d-464d-b1c7-00bb9a69c0d1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e73805a73ffd2c2028fa682daaf20e295ecd2f2d7e24bb9b897883f18c3c514e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tz7b2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-rqjpz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:15Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:15 crc kubenswrapper[4787]: I0127 07:52:15.304686 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/3969f21f-ab36-49b4-9a9c-02cf19e65ad0-metrics-certs\") pod \"network-metrics-daemon-vws75\" (UID: \"3969f21f-ab36-49b4-9a9c-02cf19e65ad0\") " pod="openshift-multus/network-metrics-daemon-vws75" Jan 27 07:52:15 crc kubenswrapper[4787]: E0127 07:52:15.304903 4787 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Jan 27 07:52:15 crc kubenswrapper[4787]: E0127 07:52:15.305039 4787 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/3969f21f-ab36-49b4-9a9c-02cf19e65ad0-metrics-certs podName:3969f21f-ab36-49b4-9a9c-02cf19e65ad0 nodeName:}" failed. No retries permitted until 2026-01-27 07:52:17.30501564 +0000 UTC m=+42.957371132 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/3969f21f-ab36-49b4-9a9c-02cf19e65ad0-metrics-certs") pod "network-metrics-daemon-vws75" (UID: "3969f21f-ab36-49b4-9a9c-02cf19e65ad0") : object "openshift-multus"/"metrics-daemon-secret" not registered Jan 27 07:52:15 crc kubenswrapper[4787]: I0127 07:52:15.307860 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-fqb6r" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0ff88955-7cd9-4af4-9e0e-79614a1d2994\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://47c65fabdecb2c24a7eeabdff7a89339f711ff64e9cb6516900be57a2753f89b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5e7833b50fe96981aa0ed63aa03b7078def1c809e60092a901a8b7ebaee78ad8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5e7833b50fe96981aa0ed63aa03b7078def1c809e60092a901a8b7ebaee78ad8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:52:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a0b5a15ba402c52fc6f3cd5d882f597c90ea8d1ab4e836f0e0998a301126f5a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a0b5a15ba402c52fc6f3cd5d882f597c90ea8d1ab4e836f0e0998a301126f5a6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:52:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:52:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://82bb64425ded7611030cccfa7a439b14e545bf83bc48c762df144eddc7d08487\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://82bb64425ded7611030cccfa7a439b14e545bf83bc48c762df144eddc7d08487\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:52:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:52:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9782b2a339d2dba1498f28ed3f7e6c4c2e747a282465311592f0fdb5863f823\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f9782b2a339d2dba1498f28ed3f7e6c4c2e747a282465311592f0fdb5863f823\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:52:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:52:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c8924a3ef645447151d91c506d105c9760f0dce0c568e9bf37ea76108a6352b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c8924a3ef645447151d91c506d105c9760f0dce0c568e9bf37ea76108a6352b2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:52:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f0da2ec606517cc9e0c2ed1d549c947db2eecb603f9bc9ba8bddbee0aad4710\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0f0da2ec606517cc9e0c2ed1d549c947db2eecb603f9bc9ba8bddbee0aad4710\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:52:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:52:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-fqb6r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:15Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:15 crc kubenswrapper[4787]: I0127 07:52:15.330819 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7279787f-4f29-4eae-a213-b7ea5d579b93\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://486c1b541ff6adc6a3cdab348fbdd7fbcf1c202c1a474e5e5db37bbcfb38fe06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://47800c35810b19642ee37e41f20900bef93d843d1d7e2219547dde7bf88cc7dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://85684c09dd546fc9a972aa45da092b22d794a5588140a451becfbe962be82b76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0aa5c186d3ea5f41540e7450b9ee5a5bbfe8359
762ab9aa219a4e4bb26fb0a94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9717409dfdeb0d26d9a1c28a651d1feddfd4aaa11a9bc7db7206fbf8c6400c22\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b0fb183cca1d2f6f6f5bd9c80107e85fe0f5f39ea985a036ba115836e45d7cc2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b0fb183cca1d2f6f6f5bd9c80107e85fe0f5f39ea985a036ba115836e45d7cc2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:51:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a9bb746c11b2a67e014815e16d6765a1c86a3d2c156cae83853881bdb988507c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a9bb746c11b2a67e014815e16d6765a1c86a3d2c156cae83853881bdb988507c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://1fa78137ccc23e32f6eb838abf3991ca3d99b8349721f1419c892a7f8795f55f\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1fa78137ccc23e32f6eb838abf3991ca3d99b8349721f1419c892a7f8795f55f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:51:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:35Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:15Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:15 crc kubenswrapper[4787]: I0127 07:52:15.346859 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7a18f3e6327858658093c392b8b7de18ec6a1085c08ae31ae2f2dc371555011a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:15Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:15 crc kubenswrapper[4787]: I0127 07:52:15.363586 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:15Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:15 crc kubenswrapper[4787]: I0127 07:52:15.390735 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:15 crc kubenswrapper[4787]: I0127 07:52:15.390782 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:15 crc kubenswrapper[4787]: I0127 07:52:15.390810 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:15 crc kubenswrapper[4787]: I0127 07:52:15.390830 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:15 crc kubenswrapper[4787]: I0127 07:52:15.390844 4787 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:15Z","lastTransitionTime":"2026-01-27T07:52:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:15 crc kubenswrapper[4787]: I0127 07:52:15.495148 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:15 crc kubenswrapper[4787]: I0127 07:52:15.495227 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:15 crc kubenswrapper[4787]: I0127 07:52:15.495242 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:15 crc kubenswrapper[4787]: I0127 07:52:15.495267 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:15 crc kubenswrapper[4787]: I0127 07:52:15.495301 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:15Z","lastTransitionTime":"2026-01-27T07:52:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:15 crc kubenswrapper[4787]: I0127 07:52:15.598649 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:15 crc kubenswrapper[4787]: I0127 07:52:15.598688 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:15 crc kubenswrapper[4787]: I0127 07:52:15.598699 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:15 crc kubenswrapper[4787]: I0127 07:52:15.598716 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:15 crc kubenswrapper[4787]: I0127 07:52:15.598730 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:15Z","lastTransitionTime":"2026-01-27T07:52:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:52:15 crc kubenswrapper[4787]: I0127 07:52:15.701986 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:15 crc kubenswrapper[4787]: I0127 07:52:15.702085 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:15 crc kubenswrapper[4787]: I0127 07:52:15.702127 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:15 crc kubenswrapper[4787]: I0127 07:52:15.702168 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:15 crc kubenswrapper[4787]: I0127 07:52:15.702224 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:15Z","lastTransitionTime":"2026-01-27T07:52:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:15 crc kubenswrapper[4787]: I0127 07:52:15.804706 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:15 crc kubenswrapper[4787]: I0127 07:52:15.804764 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:15 crc kubenswrapper[4787]: I0127 07:52:15.804791 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:15 crc kubenswrapper[4787]: I0127 07:52:15.804815 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:15 crc kubenswrapper[4787]: I0127 07:52:15.804866 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:15Z","lastTransitionTime":"2026-01-27T07:52:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:15 crc kubenswrapper[4787]: I0127 07:52:15.908252 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:15 crc kubenswrapper[4787]: I0127 07:52:15.908390 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:15 crc kubenswrapper[4787]: I0127 07:52:15.908408 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:15 crc kubenswrapper[4787]: I0127 07:52:15.908442 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:15 crc kubenswrapper[4787]: I0127 07:52:15.908456 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:15Z","lastTransitionTime":"2026-01-27T07:52:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:52:16 crc kubenswrapper[4787]: I0127 07:52:16.012915 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:16 crc kubenswrapper[4787]: I0127 07:52:16.012971 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:16 crc kubenswrapper[4787]: I0127 07:52:16.012984 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:16 crc kubenswrapper[4787]: I0127 07:52:16.013012 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:16 crc kubenswrapper[4787]: I0127 07:52:16.013024 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:16Z","lastTransitionTime":"2026-01-27T07:52:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:16 crc kubenswrapper[4787]: I0127 07:52:16.052195 4787 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-27 05:35:37.804762973 +0000 UTC Jan 27 07:52:16 crc kubenswrapper[4787]: I0127 07:52:16.116697 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:16 crc kubenswrapper[4787]: I0127 07:52:16.116740 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:16 crc kubenswrapper[4787]: I0127 07:52:16.116749 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:16 crc kubenswrapper[4787]: I0127 07:52:16.116766 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:16 crc kubenswrapper[4787]: I0127 07:52:16.116778 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:16Z","lastTransitionTime":"2026-01-27T07:52:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:52:16 crc kubenswrapper[4787]: I0127 07:52:16.220698 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:16 crc kubenswrapper[4787]: I0127 07:52:16.220770 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:16 crc kubenswrapper[4787]: I0127 07:52:16.220783 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:16 crc kubenswrapper[4787]: I0127 07:52:16.220803 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:16 crc kubenswrapper[4787]: I0127 07:52:16.220816 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:16Z","lastTransitionTime":"2026-01-27T07:52:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:16 crc kubenswrapper[4787]: I0127 07:52:16.323415 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:16 crc kubenswrapper[4787]: I0127 07:52:16.323476 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:16 crc kubenswrapper[4787]: I0127 07:52:16.323489 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:16 crc kubenswrapper[4787]: I0127 07:52:16.323508 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:16 crc kubenswrapper[4787]: I0127 07:52:16.323522 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:16Z","lastTransitionTime":"2026-01-27T07:52:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:16 crc kubenswrapper[4787]: I0127 07:52:16.425782 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:16 crc kubenswrapper[4787]: I0127 07:52:16.425849 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:16 crc kubenswrapper[4787]: I0127 07:52:16.425864 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:16 crc kubenswrapper[4787]: I0127 07:52:16.425885 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:16 crc kubenswrapper[4787]: I0127 07:52:16.425900 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:16Z","lastTransitionTime":"2026-01-27T07:52:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:52:16 crc kubenswrapper[4787]: I0127 07:52:16.528241 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:16 crc kubenswrapper[4787]: I0127 07:52:16.528312 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:16 crc kubenswrapper[4787]: I0127 07:52:16.528335 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:16 crc kubenswrapper[4787]: I0127 07:52:16.528363 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:16 crc kubenswrapper[4787]: I0127 07:52:16.528385 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:16Z","lastTransitionTime":"2026-01-27T07:52:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:16 crc kubenswrapper[4787]: I0127 07:52:16.632019 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:16 crc kubenswrapper[4787]: I0127 07:52:16.632113 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:16 crc kubenswrapper[4787]: I0127 07:52:16.632141 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:16 crc kubenswrapper[4787]: I0127 07:52:16.632177 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:16 crc kubenswrapper[4787]: I0127 07:52:16.632201 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:16Z","lastTransitionTime":"2026-01-27T07:52:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:16 crc kubenswrapper[4787]: I0127 07:52:16.735084 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:16 crc kubenswrapper[4787]: I0127 07:52:16.735147 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:16 crc kubenswrapper[4787]: I0127 07:52:16.735161 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:16 crc kubenswrapper[4787]: I0127 07:52:16.735183 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:16 crc kubenswrapper[4787]: I0127 07:52:16.735202 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:16Z","lastTransitionTime":"2026-01-27T07:52:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:52:16 crc kubenswrapper[4787]: I0127 07:52:16.838121 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:16 crc kubenswrapper[4787]: I0127 07:52:16.838159 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:16 crc kubenswrapper[4787]: I0127 07:52:16.838170 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:16 crc kubenswrapper[4787]: I0127 07:52:16.838186 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:16 crc kubenswrapper[4787]: I0127 07:52:16.838197 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:16Z","lastTransitionTime":"2026-01-27T07:52:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:16 crc kubenswrapper[4787]: I0127 07:52:16.941541 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:16 crc kubenswrapper[4787]: I0127 07:52:16.941622 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:16 crc kubenswrapper[4787]: I0127 07:52:16.941642 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:16 crc kubenswrapper[4787]: I0127 07:52:16.941662 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:16 crc kubenswrapper[4787]: I0127 07:52:16.941677 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:16Z","lastTransitionTime":"2026-01-27T07:52:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:17 crc kubenswrapper[4787]: I0127 07:52:17.044717 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:17 crc kubenswrapper[4787]: I0127 07:52:17.044781 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:17 crc kubenswrapper[4787]: I0127 07:52:17.044797 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:17 crc kubenswrapper[4787]: I0127 07:52:17.044823 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:17 crc kubenswrapper[4787]: I0127 07:52:17.044843 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:17Z","lastTransitionTime":"2026-01-27T07:52:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:52:17 crc kubenswrapper[4787]: I0127 07:52:17.052490 4787 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-20 09:19:06.219435008 +0000 UTC Jan 27 07:52:17 crc kubenswrapper[4787]: I0127 07:52:17.077903 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 27 07:52:17 crc kubenswrapper[4787]: I0127 07:52:17.077935 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 27 07:52:17 crc kubenswrapper[4787]: I0127 07:52:17.077983 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-vws75" Jan 27 07:52:17 crc kubenswrapper[4787]: I0127 07:52:17.077991 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 27 07:52:17 crc kubenswrapper[4787]: E0127 07:52:17.078108 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 27 07:52:17 crc kubenswrapper[4787]: E0127 07:52:17.078299 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 27 07:52:17 crc kubenswrapper[4787]: E0127 07:52:17.078434 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-vws75" podUID="3969f21f-ab36-49b4-9a9c-02cf19e65ad0" Jan 27 07:52:17 crc kubenswrapper[4787]: E0127 07:52:17.078533 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 27 07:52:17 crc kubenswrapper[4787]: I0127 07:52:17.147933 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:17 crc kubenswrapper[4787]: I0127 07:52:17.148011 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:17 crc kubenswrapper[4787]: I0127 07:52:17.148037 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:17 crc kubenswrapper[4787]: I0127 07:52:17.148069 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:17 crc kubenswrapper[4787]: I0127 07:52:17.148087 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:17Z","lastTransitionTime":"2026-01-27T07:52:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:17 crc kubenswrapper[4787]: I0127 07:52:17.250766 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:17 crc kubenswrapper[4787]: I0127 07:52:17.250857 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:17 crc kubenswrapper[4787]: I0127 07:52:17.250870 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:17 crc kubenswrapper[4787]: I0127 07:52:17.250889 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:17 crc kubenswrapper[4787]: I0127 07:52:17.250904 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:17Z","lastTransitionTime":"2026-01-27T07:52:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:17 crc kubenswrapper[4787]: I0127 07:52:17.331335 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/3969f21f-ab36-49b4-9a9c-02cf19e65ad0-metrics-certs\") pod \"network-metrics-daemon-vws75\" (UID: \"3969f21f-ab36-49b4-9a9c-02cf19e65ad0\") " pod="openshift-multus/network-metrics-daemon-vws75" Jan 27 07:52:17 crc kubenswrapper[4787]: E0127 07:52:17.331511 4787 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Jan 27 07:52:17 crc kubenswrapper[4787]: E0127 07:52:17.331650 4787 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/3969f21f-ab36-49b4-9a9c-02cf19e65ad0-metrics-certs podName:3969f21f-ab36-49b4-9a9c-02cf19e65ad0 nodeName:}" failed. No retries permitted until 2026-01-27 07:52:21.331589051 +0000 UTC m=+46.983944553 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/3969f21f-ab36-49b4-9a9c-02cf19e65ad0-metrics-certs") pod "network-metrics-daemon-vws75" (UID: "3969f21f-ab36-49b4-9a9c-02cf19e65ad0") : object "openshift-multus"/"metrics-daemon-secret" not registered Jan 27 07:52:17 crc kubenswrapper[4787]: I0127 07:52:17.354867 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:17 crc kubenswrapper[4787]: I0127 07:52:17.354954 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:17 crc kubenswrapper[4787]: I0127 07:52:17.354983 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:17 crc kubenswrapper[4787]: I0127 07:52:17.355017 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:17 crc kubenswrapper[4787]: I0127 07:52:17.355045 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:17Z","lastTransitionTime":"2026-01-27T07:52:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:17 crc kubenswrapper[4787]: I0127 07:52:17.458246 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:17 crc kubenswrapper[4787]: I0127 07:52:17.458353 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:17 crc kubenswrapper[4787]: I0127 07:52:17.458384 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:17 crc kubenswrapper[4787]: I0127 07:52:17.458425 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:17 crc kubenswrapper[4787]: I0127 07:52:17.458453 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:17Z","lastTransitionTime":"2026-01-27T07:52:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:52:17 crc kubenswrapper[4787]: I0127 07:52:17.561201 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:17 crc kubenswrapper[4787]: I0127 07:52:17.561246 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:17 crc kubenswrapper[4787]: I0127 07:52:17.561259 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:17 crc kubenswrapper[4787]: I0127 07:52:17.561275 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:17 crc kubenswrapper[4787]: I0127 07:52:17.561284 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:17Z","lastTransitionTime":"2026-01-27T07:52:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:17 crc kubenswrapper[4787]: I0127 07:52:17.664128 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:17 crc kubenswrapper[4787]: I0127 07:52:17.664192 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:17 crc kubenswrapper[4787]: I0127 07:52:17.664202 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:17 crc kubenswrapper[4787]: I0127 07:52:17.664220 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:17 crc kubenswrapper[4787]: I0127 07:52:17.664240 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:17Z","lastTransitionTime":"2026-01-27T07:52:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:17 crc kubenswrapper[4787]: I0127 07:52:17.768149 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:17 crc kubenswrapper[4787]: I0127 07:52:17.768215 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:17 crc kubenswrapper[4787]: I0127 07:52:17.768230 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:17 crc kubenswrapper[4787]: I0127 07:52:17.768260 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:17 crc kubenswrapper[4787]: I0127 07:52:17.768275 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:17Z","lastTransitionTime":"2026-01-27T07:52:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:52:17 crc kubenswrapper[4787]: I0127 07:52:17.871414 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:17 crc kubenswrapper[4787]: I0127 07:52:17.871471 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:17 crc kubenswrapper[4787]: I0127 07:52:17.871486 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:17 crc kubenswrapper[4787]: I0127 07:52:17.871510 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:17 crc kubenswrapper[4787]: I0127 07:52:17.871528 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:17Z","lastTransitionTime":"2026-01-27T07:52:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:17 crc kubenswrapper[4787]: I0127 07:52:17.975310 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:17 crc kubenswrapper[4787]: I0127 07:52:17.975373 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:17 crc kubenswrapper[4787]: I0127 07:52:17.975395 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:17 crc kubenswrapper[4787]: I0127 07:52:17.975417 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:17 crc kubenswrapper[4787]: I0127 07:52:17.975430 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:17Z","lastTransitionTime":"2026-01-27T07:52:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:52:18 crc kubenswrapper[4787]: I0127 07:52:18.053606 4787 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-02 12:25:27.308115905 +0000 UTC Jan 27 07:52:18 crc kubenswrapper[4787]: I0127 07:52:18.078516 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:18 crc kubenswrapper[4787]: I0127 07:52:18.078588 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:18 crc kubenswrapper[4787]: I0127 07:52:18.078599 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:18 crc kubenswrapper[4787]: I0127 07:52:18.078617 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:18 crc kubenswrapper[4787]: I0127 07:52:18.078630 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:18Z","lastTransitionTime":"2026-01-27T07:52:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:18 crc kubenswrapper[4787]: I0127 07:52:18.181389 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:18 crc kubenswrapper[4787]: I0127 07:52:18.181450 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:18 crc kubenswrapper[4787]: I0127 07:52:18.181459 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:18 crc kubenswrapper[4787]: I0127 07:52:18.181507 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:18 crc kubenswrapper[4787]: I0127 07:52:18.181519 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:18Z","lastTransitionTime":"2026-01-27T07:52:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:52:18 crc kubenswrapper[4787]: I0127 07:52:18.285363 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:18 crc kubenswrapper[4787]: I0127 07:52:18.285419 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:18 crc kubenswrapper[4787]: I0127 07:52:18.285432 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:18 crc kubenswrapper[4787]: I0127 07:52:18.285453 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:18 crc kubenswrapper[4787]: I0127 07:52:18.285467 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:18Z","lastTransitionTime":"2026-01-27T07:52:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:18 crc kubenswrapper[4787]: I0127 07:52:18.389243 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:18 crc kubenswrapper[4787]: I0127 07:52:18.389326 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:18 crc kubenswrapper[4787]: I0127 07:52:18.389346 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:18 crc kubenswrapper[4787]: I0127 07:52:18.389378 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:18 crc kubenswrapper[4787]: I0127 07:52:18.389401 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:18Z","lastTransitionTime":"2026-01-27T07:52:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:18 crc kubenswrapper[4787]: I0127 07:52:18.492777 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:18 crc kubenswrapper[4787]: I0127 07:52:18.492852 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:18 crc kubenswrapper[4787]: I0127 07:52:18.492869 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:18 crc kubenswrapper[4787]: I0127 07:52:18.492896 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:18 crc kubenswrapper[4787]: I0127 07:52:18.492917 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:18Z","lastTransitionTime":"2026-01-27T07:52:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:52:18 crc kubenswrapper[4787]: I0127 07:52:18.595771 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:18 crc kubenswrapper[4787]: I0127 07:52:18.595837 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:18 crc kubenswrapper[4787]: I0127 07:52:18.595862 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:18 crc kubenswrapper[4787]: I0127 07:52:18.595896 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:18 crc kubenswrapper[4787]: I0127 07:52:18.595920 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:18Z","lastTransitionTime":"2026-01-27T07:52:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:18 crc kubenswrapper[4787]: I0127 07:52:18.699559 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:18 crc kubenswrapper[4787]: I0127 07:52:18.699676 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:18 crc kubenswrapper[4787]: I0127 07:52:18.699699 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:18 crc kubenswrapper[4787]: I0127 07:52:18.699724 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:18 crc kubenswrapper[4787]: I0127 07:52:18.699744 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:18Z","lastTransitionTime":"2026-01-27T07:52:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:18 crc kubenswrapper[4787]: I0127 07:52:18.802453 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:18 crc kubenswrapper[4787]: I0127 07:52:18.802552 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:18 crc kubenswrapper[4787]: I0127 07:52:18.802611 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:18 crc kubenswrapper[4787]: I0127 07:52:18.802650 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:18 crc kubenswrapper[4787]: I0127 07:52:18.802679 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:18Z","lastTransitionTime":"2026-01-27T07:52:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:52:18 crc kubenswrapper[4787]: I0127 07:52:18.905704 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:18 crc kubenswrapper[4787]: I0127 07:52:18.905763 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:18 crc kubenswrapper[4787]: I0127 07:52:18.905772 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:18 crc kubenswrapper[4787]: I0127 07:52:18.905790 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:18 crc kubenswrapper[4787]: I0127 07:52:18.905831 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:18Z","lastTransitionTime":"2026-01-27T07:52:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:19 crc kubenswrapper[4787]: I0127 07:52:19.008935 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:19 crc kubenswrapper[4787]: I0127 07:52:19.008998 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:19 crc kubenswrapper[4787]: I0127 07:52:19.009015 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:19 crc kubenswrapper[4787]: I0127 07:52:19.009043 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:19 crc kubenswrapper[4787]: I0127 07:52:19.009064 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:19Z","lastTransitionTime":"2026-01-27T07:52:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:19 crc kubenswrapper[4787]: I0127 07:52:19.053863 4787 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-22 19:34:52.818339788 +0000 UTC Jan 27 07:52:19 crc kubenswrapper[4787]: I0127 07:52:19.076674 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 27 07:52:19 crc kubenswrapper[4787]: E0127 07:52:19.076856 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 27 07:52:19 crc kubenswrapper[4787]: I0127 07:52:19.076980 4787 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 27 07:52:19 crc kubenswrapper[4787]: I0127 07:52:19.077058 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-vws75" Jan 27 07:52:19 crc kubenswrapper[4787]: I0127 07:52:19.077203 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 27 07:52:19 crc kubenswrapper[4787]: E0127 07:52:19.077276 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 27 07:52:19 crc kubenswrapper[4787]: E0127 07:52:19.077626 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 27 07:52:19 crc kubenswrapper[4787]: E0127 07:52:19.077847 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-vws75" podUID="3969f21f-ab36-49b4-9a9c-02cf19e65ad0" Jan 27 07:52:19 crc kubenswrapper[4787]: I0127 07:52:19.113484 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:19 crc kubenswrapper[4787]: I0127 07:52:19.113527 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:19 crc kubenswrapper[4787]: I0127 07:52:19.113538 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:19 crc kubenswrapper[4787]: I0127 07:52:19.113573 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:19 crc kubenswrapper[4787]: I0127 07:52:19.113588 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:19Z","lastTransitionTime":"2026-01-27T07:52:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:52:19 crc kubenswrapper[4787]: I0127 07:52:19.217137 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:19 crc kubenswrapper[4787]: I0127 07:52:19.217200 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:19 crc kubenswrapper[4787]: I0127 07:52:19.217213 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:19 crc kubenswrapper[4787]: I0127 07:52:19.217235 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:19 crc kubenswrapper[4787]: I0127 07:52:19.217254 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:19Z","lastTransitionTime":"2026-01-27T07:52:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:19 crc kubenswrapper[4787]: I0127 07:52:19.320331 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:19 crc kubenswrapper[4787]: I0127 07:52:19.320417 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:19 crc kubenswrapper[4787]: I0127 07:52:19.320444 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:19 crc kubenswrapper[4787]: I0127 07:52:19.320483 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:19 crc kubenswrapper[4787]: I0127 07:52:19.320508 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:19Z","lastTransitionTime":"2026-01-27T07:52:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:19 crc kubenswrapper[4787]: I0127 07:52:19.424237 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:19 crc kubenswrapper[4787]: I0127 07:52:19.424322 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:19 crc kubenswrapper[4787]: I0127 07:52:19.424349 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:19 crc kubenswrapper[4787]: I0127 07:52:19.424388 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:19 crc kubenswrapper[4787]: I0127 07:52:19.424413 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:19Z","lastTransitionTime":"2026-01-27T07:52:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:52:19 crc kubenswrapper[4787]: I0127 07:52:19.527319 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:19 crc kubenswrapper[4787]: I0127 07:52:19.527377 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:19 crc kubenswrapper[4787]: I0127 07:52:19.527389 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:19 crc kubenswrapper[4787]: I0127 07:52:19.527411 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:19 crc kubenswrapper[4787]: I0127 07:52:19.527424 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:19Z","lastTransitionTime":"2026-01-27T07:52:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:19 crc kubenswrapper[4787]: I0127 07:52:19.630033 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:19 crc kubenswrapper[4787]: I0127 07:52:19.630085 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:19 crc kubenswrapper[4787]: I0127 07:52:19.630100 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:19 crc kubenswrapper[4787]: I0127 07:52:19.630122 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:19 crc kubenswrapper[4787]: I0127 07:52:19.630139 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:19Z","lastTransitionTime":"2026-01-27T07:52:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:19 crc kubenswrapper[4787]: I0127 07:52:19.735000 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:19 crc kubenswrapper[4787]: I0127 07:52:19.735057 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:19 crc kubenswrapper[4787]: I0127 07:52:19.735069 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:19 crc kubenswrapper[4787]: I0127 07:52:19.735089 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:19 crc kubenswrapper[4787]: I0127 07:52:19.735101 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:19Z","lastTransitionTime":"2026-01-27T07:52:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:52:19 crc kubenswrapper[4787]: I0127 07:52:19.837847 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:19 crc kubenswrapper[4787]: I0127 07:52:19.837902 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:19 crc kubenswrapper[4787]: I0127 07:52:19.837915 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:19 crc kubenswrapper[4787]: I0127 07:52:19.837934 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:19 crc kubenswrapper[4787]: I0127 07:52:19.837950 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:19Z","lastTransitionTime":"2026-01-27T07:52:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:19 crc kubenswrapper[4787]: I0127 07:52:19.942410 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:19 crc kubenswrapper[4787]: I0127 07:52:19.942458 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:19 crc kubenswrapper[4787]: I0127 07:52:19.942467 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:19 crc kubenswrapper[4787]: I0127 07:52:19.942485 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:19 crc kubenswrapper[4787]: I0127 07:52:19.942497 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:19Z","lastTransitionTime":"2026-01-27T07:52:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:20 crc kubenswrapper[4787]: I0127 07:52:20.044736 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:20 crc kubenswrapper[4787]: I0127 07:52:20.044769 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:20 crc kubenswrapper[4787]: I0127 07:52:20.044777 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:20 crc kubenswrapper[4787]: I0127 07:52:20.044793 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:20 crc kubenswrapper[4787]: I0127 07:52:20.044802 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:20Z","lastTransitionTime":"2026-01-27T07:52:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:52:20 crc kubenswrapper[4787]: I0127 07:52:20.054529 4787 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-30 23:57:16.484027287 +0000 UTC Jan 27 07:52:20 crc kubenswrapper[4787]: I0127 07:52:20.148517 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:20 crc kubenswrapper[4787]: I0127 07:52:20.148595 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:20 crc kubenswrapper[4787]: I0127 07:52:20.148613 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:20 crc kubenswrapper[4787]: I0127 07:52:20.148635 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:20 crc kubenswrapper[4787]: I0127 07:52:20.148667 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:20Z","lastTransitionTime":"2026-01-27T07:52:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:20 crc kubenswrapper[4787]: I0127 07:52:20.252420 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:20 crc kubenswrapper[4787]: I0127 07:52:20.252507 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:20 crc kubenswrapper[4787]: I0127 07:52:20.252527 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:20 crc kubenswrapper[4787]: I0127 07:52:20.252596 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:20 crc kubenswrapper[4787]: I0127 07:52:20.252618 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:20Z","lastTransitionTime":"2026-01-27T07:52:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:52:20 crc kubenswrapper[4787]: I0127 07:52:20.355212 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:20 crc kubenswrapper[4787]: I0127 07:52:20.355277 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:20 crc kubenswrapper[4787]: I0127 07:52:20.355290 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:20 crc kubenswrapper[4787]: I0127 07:52:20.355309 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:20 crc kubenswrapper[4787]: I0127 07:52:20.355321 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:20Z","lastTransitionTime":"2026-01-27T07:52:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:20 crc kubenswrapper[4787]: I0127 07:52:20.458469 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:20 crc kubenswrapper[4787]: I0127 07:52:20.458543 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:20 crc kubenswrapper[4787]: I0127 07:52:20.458591 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:20 crc kubenswrapper[4787]: I0127 07:52:20.458620 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:20 crc kubenswrapper[4787]: I0127 07:52:20.458639 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:20Z","lastTransitionTime":"2026-01-27T07:52:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:20 crc kubenswrapper[4787]: I0127 07:52:20.561830 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:20 crc kubenswrapper[4787]: I0127 07:52:20.561924 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:20 crc kubenswrapper[4787]: I0127 07:52:20.561946 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:20 crc kubenswrapper[4787]: I0127 07:52:20.561976 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:20 crc kubenswrapper[4787]: I0127 07:52:20.562012 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:20Z","lastTransitionTime":"2026-01-27T07:52:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:52:20 crc kubenswrapper[4787]: I0127 07:52:20.665852 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:20 crc kubenswrapper[4787]: I0127 07:52:20.666119 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:20 crc kubenswrapper[4787]: I0127 07:52:20.666138 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:20 crc kubenswrapper[4787]: I0127 07:52:20.666157 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:20 crc kubenswrapper[4787]: I0127 07:52:20.666178 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:20Z","lastTransitionTime":"2026-01-27T07:52:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:20 crc kubenswrapper[4787]: I0127 07:52:20.769426 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:20 crc kubenswrapper[4787]: I0127 07:52:20.769474 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:20 crc kubenswrapper[4787]: I0127 07:52:20.769485 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:20 crc kubenswrapper[4787]: I0127 07:52:20.769510 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:20 crc kubenswrapper[4787]: I0127 07:52:20.769524 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:20Z","lastTransitionTime":"2026-01-27T07:52:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:20 crc kubenswrapper[4787]: I0127 07:52:20.872869 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:20 crc kubenswrapper[4787]: I0127 07:52:20.872938 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:20 crc kubenswrapper[4787]: I0127 07:52:20.872956 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:20 crc kubenswrapper[4787]: I0127 07:52:20.873002 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:20 crc kubenswrapper[4787]: I0127 07:52:20.873021 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:20Z","lastTransitionTime":"2026-01-27T07:52:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:52:20 crc kubenswrapper[4787]: I0127 07:52:20.977436 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:20 crc kubenswrapper[4787]: I0127 07:52:20.977494 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:20 crc kubenswrapper[4787]: I0127 07:52:20.977511 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:20 crc kubenswrapper[4787]: I0127 07:52:20.977531 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:20 crc kubenswrapper[4787]: I0127 07:52:20.977544 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:20Z","lastTransitionTime":"2026-01-27T07:52:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:21 crc kubenswrapper[4787]: I0127 07:52:21.055231 4787 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-08 07:36:04.241787243 +0000 UTC Jan 27 07:52:21 crc kubenswrapper[4787]: I0127 07:52:21.075807 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 27 07:52:21 crc kubenswrapper[4787]: I0127 07:52:21.075881 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 27 07:52:21 crc kubenswrapper[4787]: E0127 07:52:21.076526 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 27 07:52:21 crc kubenswrapper[4787]: I0127 07:52:21.075979 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 27 07:52:21 crc kubenswrapper[4787]: E0127 07:52:21.076733 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 27 07:52:21 crc kubenswrapper[4787]: E0127 07:52:21.076948 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 27 07:52:21 crc kubenswrapper[4787]: I0127 07:52:21.075907 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-vws75" Jan 27 07:52:21 crc kubenswrapper[4787]: E0127 07:52:21.077223 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-vws75" podUID="3969f21f-ab36-49b4-9a9c-02cf19e65ad0" Jan 27 07:52:21 crc kubenswrapper[4787]: I0127 07:52:21.079776 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:21 crc kubenswrapper[4787]: I0127 07:52:21.079812 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:21 crc kubenswrapper[4787]: I0127 07:52:21.079823 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:21 crc kubenswrapper[4787]: I0127 07:52:21.079843 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:21 crc kubenswrapper[4787]: I0127 07:52:21.079855 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:21Z","lastTransitionTime":"2026-01-27T07:52:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:21 crc kubenswrapper[4787]: I0127 07:52:21.182986 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:21 crc kubenswrapper[4787]: I0127 07:52:21.183043 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:21 crc kubenswrapper[4787]: I0127 07:52:21.183060 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:21 crc kubenswrapper[4787]: I0127 07:52:21.183081 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:21 crc kubenswrapper[4787]: I0127 07:52:21.183095 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:21Z","lastTransitionTime":"2026-01-27T07:52:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:52:21 crc kubenswrapper[4787]: I0127 07:52:21.286863 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:21 crc kubenswrapper[4787]: I0127 07:52:21.286947 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:21 crc kubenswrapper[4787]: I0127 07:52:21.286973 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:21 crc kubenswrapper[4787]: I0127 07:52:21.287004 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:21 crc kubenswrapper[4787]: I0127 07:52:21.287026 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:21Z","lastTransitionTime":"2026-01-27T07:52:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:21 crc kubenswrapper[4787]: I0127 07:52:21.387716 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/3969f21f-ab36-49b4-9a9c-02cf19e65ad0-metrics-certs\") pod \"network-metrics-daemon-vws75\" (UID: \"3969f21f-ab36-49b4-9a9c-02cf19e65ad0\") " pod="openshift-multus/network-metrics-daemon-vws75" Jan 27 07:52:21 crc kubenswrapper[4787]: E0127 07:52:21.387918 4787 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Jan 27 07:52:21 crc kubenswrapper[4787]: E0127 07:52:21.388015 4787 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/3969f21f-ab36-49b4-9a9c-02cf19e65ad0-metrics-certs podName:3969f21f-ab36-49b4-9a9c-02cf19e65ad0 nodeName:}" failed. No retries permitted until 2026-01-27 07:52:29.387986151 +0000 UTC m=+55.040341653 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/3969f21f-ab36-49b4-9a9c-02cf19e65ad0-metrics-certs") pod "network-metrics-daemon-vws75" (UID: "3969f21f-ab36-49b4-9a9c-02cf19e65ad0") : object "openshift-multus"/"metrics-daemon-secret" not registered Jan 27 07:52:21 crc kubenswrapper[4787]: I0127 07:52:21.390344 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:21 crc kubenswrapper[4787]: I0127 07:52:21.390398 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:21 crc kubenswrapper[4787]: I0127 07:52:21.390411 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:21 crc kubenswrapper[4787]: I0127 07:52:21.390432 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:21 crc kubenswrapper[4787]: I0127 07:52:21.390447 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:21Z","lastTransitionTime":"2026-01-27T07:52:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:21 crc kubenswrapper[4787]: I0127 07:52:21.493066 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:21 crc kubenswrapper[4787]: I0127 07:52:21.493098 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:21 crc kubenswrapper[4787]: I0127 07:52:21.493106 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:21 crc kubenswrapper[4787]: I0127 07:52:21.493120 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:21 crc kubenswrapper[4787]: I0127 07:52:21.493129 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:21Z","lastTransitionTime":"2026-01-27T07:52:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:52:21 crc kubenswrapper[4787]: I0127 07:52:21.493922 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:21 crc kubenswrapper[4787]: I0127 07:52:21.493952 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:21 crc kubenswrapper[4787]: I0127 07:52:21.493960 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:21 crc kubenswrapper[4787]: I0127 07:52:21.493970 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:21 crc kubenswrapper[4787]: I0127 07:52:21.493979 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:21Z","lastTransitionTime":"2026-01-27T07:52:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:21 crc kubenswrapper[4787]: E0127 07:52:21.507166 4787 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T07:52:21Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:21Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T07:52:21Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:21Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T07:52:21Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:21Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T07:52:21Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:21Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"55ded10c-ad4c-443d-a571-c7a8f4a50b03\\\",\\\"systemUUID\\\":\\\"553a2493-323d-43ed-bfcf-44c5a8746c19\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:21Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:21 crc kubenswrapper[4787]: I0127 07:52:21.513227 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:21 crc kubenswrapper[4787]: I0127 07:52:21.513276 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 27 07:52:21 crc kubenswrapper[4787]: I0127 07:52:21.513286 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:21 crc kubenswrapper[4787]: I0127 07:52:21.513303 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:21 crc kubenswrapper[4787]: I0127 07:52:21.513314 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:21Z","lastTransitionTime":"2026-01-27T07:52:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:21 crc kubenswrapper[4787]: E0127 07:52:21.526437 4787 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T07:52:21Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:21Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T07:52:21Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:21Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T07:52:21Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:21Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T07:52:21Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:21Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"55ded10c-ad4c-443d-a571-c7a8f4a50b03\\\",\\\"systemUUID\\\":\\\"553a2493-323d-43ed-bfcf-44c5a8746c19\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:21Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:21 crc kubenswrapper[4787]: I0127 07:52:21.531869 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:21 crc kubenswrapper[4787]: I0127 07:52:21.531913 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 27 07:52:21 crc kubenswrapper[4787]: I0127 07:52:21.531922 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:21 crc kubenswrapper[4787]: I0127 07:52:21.531942 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:21 crc kubenswrapper[4787]: I0127 07:52:21.531954 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:21Z","lastTransitionTime":"2026-01-27T07:52:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:21 crc kubenswrapper[4787]: E0127 07:52:21.546909 4787 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T07:52:21Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:21Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T07:52:21Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:21Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T07:52:21Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:21Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T07:52:21Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:21Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"55ded10c-ad4c-443d-a571-c7a8f4a50b03\\\",\\\"systemUUID\\\":\\\"553a2493-323d-43ed-bfcf-44c5a8746c19\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:21Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:21 crc kubenswrapper[4787]: I0127 07:52:21.551683 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:21 crc kubenswrapper[4787]: I0127 07:52:21.551742 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 27 07:52:21 crc kubenswrapper[4787]: I0127 07:52:21.551755 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:21 crc kubenswrapper[4787]: I0127 07:52:21.551780 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:21 crc kubenswrapper[4787]: I0127 07:52:21.551795 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:21Z","lastTransitionTime":"2026-01-27T07:52:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:21 crc kubenswrapper[4787]: E0127 07:52:21.570390 4787 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T07:52:21Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:21Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T07:52:21Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:21Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T07:52:21Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:21Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T07:52:21Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:21Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"55ded10c-ad4c-443d-a571-c7a8f4a50b03\\\",\\\"systemUUID\\\":\\\"553a2493-323d-43ed-bfcf-44c5a8746c19\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:21Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:21 crc kubenswrapper[4787]: I0127 07:52:21.575836 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:21 crc kubenswrapper[4787]: I0127 07:52:21.575899 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 27 07:52:21 crc kubenswrapper[4787]: I0127 07:52:21.575921 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:21 crc kubenswrapper[4787]: I0127 07:52:21.575946 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:21 crc kubenswrapper[4787]: I0127 07:52:21.575965 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:21Z","lastTransitionTime":"2026-01-27T07:52:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:21 crc kubenswrapper[4787]: E0127 07:52:21.591024 4787 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T07:52:21Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:21Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T07:52:21Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:21Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T07:52:21Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:21Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T07:52:21Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:21Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"55ded10c-ad4c-443d-a571-c7a8f4a50b03\\\",\\\"systemUUID\\\":\\\"553a2493-323d-43ed-bfcf-44c5a8746c19\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:21Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:21 crc kubenswrapper[4787]: E0127 07:52:21.591222 4787 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Jan 27 07:52:21 crc kubenswrapper[4787]: I0127 07:52:21.595826 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Jan 27 07:52:21 crc kubenswrapper[4787]: I0127 07:52:21.595885 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:21 crc kubenswrapper[4787]: I0127 07:52:21.595897 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:21 crc kubenswrapper[4787]: I0127 07:52:21.595916 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:21 crc kubenswrapper[4787]: I0127 07:52:21.595932 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:21Z","lastTransitionTime":"2026-01-27T07:52:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:21 crc kubenswrapper[4787]: I0127 07:52:21.698544 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:21 crc kubenswrapper[4787]: I0127 07:52:21.698622 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:21 crc kubenswrapper[4787]: I0127 07:52:21.698637 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:21 crc kubenswrapper[4787]: I0127 07:52:21.698662 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:21 crc kubenswrapper[4787]: I0127 07:52:21.698678 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:21Z","lastTransitionTime":"2026-01-27T07:52:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:21 crc kubenswrapper[4787]: I0127 07:52:21.802137 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:21 crc kubenswrapper[4787]: I0127 07:52:21.802197 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:21 crc kubenswrapper[4787]: I0127 07:52:21.802211 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:21 crc kubenswrapper[4787]: I0127 07:52:21.802236 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:21 crc kubenswrapper[4787]: I0127 07:52:21.802256 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:21Z","lastTransitionTime":"2026-01-27T07:52:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:52:21 crc kubenswrapper[4787]: I0127 07:52:21.906203 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:21 crc kubenswrapper[4787]: I0127 07:52:21.906270 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:21 crc kubenswrapper[4787]: I0127 07:52:21.906296 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:21 crc kubenswrapper[4787]: I0127 07:52:21.906332 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:21 crc kubenswrapper[4787]: I0127 07:52:21.906356 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:21Z","lastTransitionTime":"2026-01-27T07:52:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:22 crc kubenswrapper[4787]: I0127 07:52:22.010030 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:22 crc kubenswrapper[4787]: I0127 07:52:22.010135 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:22 crc kubenswrapper[4787]: I0127 07:52:22.010152 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:22 crc kubenswrapper[4787]: I0127 07:52:22.010177 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:22 crc kubenswrapper[4787]: I0127 07:52:22.010197 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:22Z","lastTransitionTime":"2026-01-27T07:52:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:52:22 crc kubenswrapper[4787]: I0127 07:52:22.056074 4787 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-18 20:56:32.839215637 +0000 UTC Jan 27 07:52:22 crc kubenswrapper[4787]: I0127 07:52:22.113724 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:22 crc kubenswrapper[4787]: I0127 07:52:22.113786 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:22 crc kubenswrapper[4787]: I0127 07:52:22.113801 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:22 crc kubenswrapper[4787]: I0127 07:52:22.113826 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:22 crc kubenswrapper[4787]: I0127 07:52:22.113842 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:22Z","lastTransitionTime":"2026-01-27T07:52:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:22 crc kubenswrapper[4787]: I0127 07:52:22.217569 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:22 crc kubenswrapper[4787]: I0127 07:52:22.217610 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:22 crc kubenswrapper[4787]: I0127 07:52:22.217621 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:22 crc kubenswrapper[4787]: I0127 07:52:22.217639 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:22 crc kubenswrapper[4787]: I0127 07:52:22.217651 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:22Z","lastTransitionTime":"2026-01-27T07:52:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:52:22 crc kubenswrapper[4787]: I0127 07:52:22.321108 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:22 crc kubenswrapper[4787]: I0127 07:52:22.321172 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:22 crc kubenswrapper[4787]: I0127 07:52:22.321181 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:22 crc kubenswrapper[4787]: I0127 07:52:22.321198 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:22 crc kubenswrapper[4787]: I0127 07:52:22.321209 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:22Z","lastTransitionTime":"2026-01-27T07:52:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:22 crc kubenswrapper[4787]: I0127 07:52:22.424735 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:22 crc kubenswrapper[4787]: I0127 07:52:22.424836 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:22 crc kubenswrapper[4787]: I0127 07:52:22.424879 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:22 crc kubenswrapper[4787]: I0127 07:52:22.424913 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:22 crc kubenswrapper[4787]: I0127 07:52:22.424935 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:22Z","lastTransitionTime":"2026-01-27T07:52:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:22 crc kubenswrapper[4787]: I0127 07:52:22.528818 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:22 crc kubenswrapper[4787]: I0127 07:52:22.528891 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:22 crc kubenswrapper[4787]: I0127 07:52:22.528912 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:22 crc kubenswrapper[4787]: I0127 07:52:22.528939 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:22 crc kubenswrapper[4787]: I0127 07:52:22.528953 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:22Z","lastTransitionTime":"2026-01-27T07:52:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:52:22 crc kubenswrapper[4787]: I0127 07:52:22.632050 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:22 crc kubenswrapper[4787]: I0127 07:52:22.632105 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:22 crc kubenswrapper[4787]: I0127 07:52:22.632115 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:22 crc kubenswrapper[4787]: I0127 07:52:22.632134 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:22 crc kubenswrapper[4787]: I0127 07:52:22.632147 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:22Z","lastTransitionTime":"2026-01-27T07:52:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:22 crc kubenswrapper[4787]: I0127 07:52:22.736159 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:22 crc kubenswrapper[4787]: I0127 07:52:22.736229 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:22 crc kubenswrapper[4787]: I0127 07:52:22.736249 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:22 crc kubenswrapper[4787]: I0127 07:52:22.736276 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:22 crc kubenswrapper[4787]: I0127 07:52:22.736297 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:22Z","lastTransitionTime":"2026-01-27T07:52:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:22 crc kubenswrapper[4787]: I0127 07:52:22.840229 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:22 crc kubenswrapper[4787]: I0127 07:52:22.840324 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:22 crc kubenswrapper[4787]: I0127 07:52:22.840346 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:22 crc kubenswrapper[4787]: I0127 07:52:22.840371 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:22 crc kubenswrapper[4787]: I0127 07:52:22.840395 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:22Z","lastTransitionTime":"2026-01-27T07:52:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:52:22 crc kubenswrapper[4787]: I0127 07:52:22.943347 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:22 crc kubenswrapper[4787]: I0127 07:52:22.943423 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:22 crc kubenswrapper[4787]: I0127 07:52:22.943444 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:22 crc kubenswrapper[4787]: I0127 07:52:22.943478 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:22 crc kubenswrapper[4787]: I0127 07:52:22.943502 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:22Z","lastTransitionTime":"2026-01-27T07:52:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:23 crc kubenswrapper[4787]: I0127 07:52:23.046914 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:23 crc kubenswrapper[4787]: I0127 07:52:23.046987 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:23 crc kubenswrapper[4787]: I0127 07:52:23.047006 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:23 crc kubenswrapper[4787]: I0127 07:52:23.047035 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:23 crc kubenswrapper[4787]: I0127 07:52:23.047056 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:23Z","lastTransitionTime":"2026-01-27T07:52:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:23 crc kubenswrapper[4787]: I0127 07:52:23.057231 4787 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-08 22:59:47.377984883 +0000 UTC Jan 27 07:52:23 crc kubenswrapper[4787]: I0127 07:52:23.075725 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 27 07:52:23 crc kubenswrapper[4787]: I0127 07:52:23.075810 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-vws75" Jan 27 07:52:23 crc kubenswrapper[4787]: I0127 07:52:23.075875 4787 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 27 07:52:23 crc kubenswrapper[4787]: E0127 07:52:23.075994 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 27 07:52:23 crc kubenswrapper[4787]: I0127 07:52:23.076104 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 27 07:52:23 crc kubenswrapper[4787]: E0127 07:52:23.076344 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 27 07:52:23 crc kubenswrapper[4787]: E0127 07:52:23.076535 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-vws75" podUID="3969f21f-ab36-49b4-9a9c-02cf19e65ad0" Jan 27 07:52:23 crc kubenswrapper[4787]: E0127 07:52:23.081432 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 27 07:52:23 crc kubenswrapper[4787]: I0127 07:52:23.150434 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:23 crc kubenswrapper[4787]: I0127 07:52:23.150489 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:23 crc kubenswrapper[4787]: I0127 07:52:23.150503 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:23 crc kubenswrapper[4787]: I0127 07:52:23.150527 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:23 crc kubenswrapper[4787]: I0127 07:52:23.150542 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:23Z","lastTransitionTime":"2026-01-27T07:52:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:52:23 crc kubenswrapper[4787]: I0127 07:52:23.253958 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:23 crc kubenswrapper[4787]: I0127 07:52:23.254029 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:23 crc kubenswrapper[4787]: I0127 07:52:23.254042 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:23 crc kubenswrapper[4787]: I0127 07:52:23.254062 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:23 crc kubenswrapper[4787]: I0127 07:52:23.254076 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:23Z","lastTransitionTime":"2026-01-27T07:52:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:23 crc kubenswrapper[4787]: I0127 07:52:23.356258 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:23 crc kubenswrapper[4787]: I0127 07:52:23.356311 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:23 crc kubenswrapper[4787]: I0127 07:52:23.356322 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:23 crc kubenswrapper[4787]: I0127 07:52:23.356340 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:23 crc kubenswrapper[4787]: I0127 07:52:23.356352 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:23Z","lastTransitionTime":"2026-01-27T07:52:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:23 crc kubenswrapper[4787]: I0127 07:52:23.459710 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:23 crc kubenswrapper[4787]: I0127 07:52:23.459797 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:23 crc kubenswrapper[4787]: I0127 07:52:23.459816 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:23 crc kubenswrapper[4787]: I0127 07:52:23.459847 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:23 crc kubenswrapper[4787]: I0127 07:52:23.459871 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:23Z","lastTransitionTime":"2026-01-27T07:52:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:52:23 crc kubenswrapper[4787]: I0127 07:52:23.563494 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:23 crc kubenswrapper[4787]: I0127 07:52:23.563645 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:23 crc kubenswrapper[4787]: I0127 07:52:23.563682 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:23 crc kubenswrapper[4787]: I0127 07:52:23.563720 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:23 crc kubenswrapper[4787]: I0127 07:52:23.563745 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:23Z","lastTransitionTime":"2026-01-27T07:52:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:23 crc kubenswrapper[4787]: I0127 07:52:23.667875 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:23 crc kubenswrapper[4787]: I0127 07:52:23.667958 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:23 crc kubenswrapper[4787]: I0127 07:52:23.667977 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:23 crc kubenswrapper[4787]: I0127 07:52:23.668006 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:23 crc kubenswrapper[4787]: I0127 07:52:23.668026 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:23Z","lastTransitionTime":"2026-01-27T07:52:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:23 crc kubenswrapper[4787]: I0127 07:52:23.770855 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:23 crc kubenswrapper[4787]: I0127 07:52:23.770897 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:23 crc kubenswrapper[4787]: I0127 07:52:23.770913 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:23 crc kubenswrapper[4787]: I0127 07:52:23.770928 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:23 crc kubenswrapper[4787]: I0127 07:52:23.770938 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:23Z","lastTransitionTime":"2026-01-27T07:52:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:52:23 crc kubenswrapper[4787]: I0127 07:52:23.874386 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:23 crc kubenswrapper[4787]: I0127 07:52:23.874480 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:23 crc kubenswrapper[4787]: I0127 07:52:23.874500 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:23 crc kubenswrapper[4787]: I0127 07:52:23.874528 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:23 crc kubenswrapper[4787]: I0127 07:52:23.874547 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:23Z","lastTransitionTime":"2026-01-27T07:52:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:23 crc kubenswrapper[4787]: I0127 07:52:23.977459 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:23 crc kubenswrapper[4787]: I0127 07:52:23.977853 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:23 crc kubenswrapper[4787]: I0127 07:52:23.977943 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:23 crc kubenswrapper[4787]: I0127 07:52:23.978032 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:23 crc kubenswrapper[4787]: I0127 07:52:23.978106 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:23Z","lastTransitionTime":"2026-01-27T07:52:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:52:24 crc kubenswrapper[4787]: I0127 07:52:24.057473 4787 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-31 08:44:38.285442502 +0000 UTC Jan 27 07:52:24 crc kubenswrapper[4787]: I0127 07:52:24.082645 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:24 crc kubenswrapper[4787]: I0127 07:52:24.082698 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:24 crc kubenswrapper[4787]: I0127 07:52:24.082710 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:24 crc kubenswrapper[4787]: I0127 07:52:24.082729 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:24 crc kubenswrapper[4787]: I0127 07:52:24.082744 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:24Z","lastTransitionTime":"2026-01-27T07:52:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:24 crc kubenswrapper[4787]: I0127 07:52:24.185427 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:24 crc kubenswrapper[4787]: I0127 07:52:24.185514 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:24 crc kubenswrapper[4787]: I0127 07:52:24.185530 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:24 crc kubenswrapper[4787]: I0127 07:52:24.185555 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:24 crc kubenswrapper[4787]: I0127 07:52:24.185591 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:24Z","lastTransitionTime":"2026-01-27T07:52:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:52:24 crc kubenswrapper[4787]: I0127 07:52:24.288736 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:24 crc kubenswrapper[4787]: I0127 07:52:24.288813 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:24 crc kubenswrapper[4787]: I0127 07:52:24.288830 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:24 crc kubenswrapper[4787]: I0127 07:52:24.288854 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:24 crc kubenswrapper[4787]: I0127 07:52:24.288871 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:24Z","lastTransitionTime":"2026-01-27T07:52:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:24 crc kubenswrapper[4787]: I0127 07:52:24.392031 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:24 crc kubenswrapper[4787]: I0127 07:52:24.392126 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:24 crc kubenswrapper[4787]: I0127 07:52:24.392151 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:24 crc kubenswrapper[4787]: I0127 07:52:24.392187 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:24 crc kubenswrapper[4787]: I0127 07:52:24.392212 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:24Z","lastTransitionTime":"2026-01-27T07:52:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:24 crc kubenswrapper[4787]: I0127 07:52:24.494757 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:24 crc kubenswrapper[4787]: I0127 07:52:24.494799 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:24 crc kubenswrapper[4787]: I0127 07:52:24.494807 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:24 crc kubenswrapper[4787]: I0127 07:52:24.494822 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:24 crc kubenswrapper[4787]: I0127 07:52:24.494832 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:24Z","lastTransitionTime":"2026-01-27T07:52:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:52:24 crc kubenswrapper[4787]: I0127 07:52:24.598030 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:24 crc kubenswrapper[4787]: I0127 07:52:24.598097 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:24 crc kubenswrapper[4787]: I0127 07:52:24.598115 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:24 crc kubenswrapper[4787]: I0127 07:52:24.598139 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:24 crc kubenswrapper[4787]: I0127 07:52:24.598153 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:24Z","lastTransitionTime":"2026-01-27T07:52:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:24 crc kubenswrapper[4787]: I0127 07:52:24.700863 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:24 crc kubenswrapper[4787]: I0127 07:52:24.700921 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:24 crc kubenswrapper[4787]: I0127 07:52:24.700934 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:24 crc kubenswrapper[4787]: I0127 07:52:24.700953 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:24 crc kubenswrapper[4787]: I0127 07:52:24.700965 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:24Z","lastTransitionTime":"2026-01-27T07:52:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:24 crc kubenswrapper[4787]: I0127 07:52:24.804279 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:24 crc kubenswrapper[4787]: I0127 07:52:24.804333 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:24 crc kubenswrapper[4787]: I0127 07:52:24.804343 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:24 crc kubenswrapper[4787]: I0127 07:52:24.804362 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:24 crc kubenswrapper[4787]: I0127 07:52:24.804374 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:24Z","lastTransitionTime":"2026-01-27T07:52:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:52:24 crc kubenswrapper[4787]: I0127 07:52:24.908494 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:24 crc kubenswrapper[4787]: I0127 07:52:24.908537 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:24 crc kubenswrapper[4787]: I0127 07:52:24.908550 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:24 crc kubenswrapper[4787]: I0127 07:52:24.908580 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:24 crc kubenswrapper[4787]: I0127 07:52:24.908591 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:24Z","lastTransitionTime":"2026-01-27T07:52:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:25 crc kubenswrapper[4787]: I0127 07:52:25.011440 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:25 crc kubenswrapper[4787]: I0127 07:52:25.011888 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:25 crc kubenswrapper[4787]: I0127 07:52:25.012029 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:25 crc kubenswrapper[4787]: I0127 07:52:25.012161 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:25 crc kubenswrapper[4787]: I0127 07:52:25.012286 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:25Z","lastTransitionTime":"2026-01-27T07:52:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:25 crc kubenswrapper[4787]: I0127 07:52:25.058535 4787 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-20 20:19:58.664419934 +0000 UTC Jan 27 07:52:25 crc kubenswrapper[4787]: I0127 07:52:25.076355 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 27 07:52:25 crc kubenswrapper[4787]: I0127 07:52:25.076422 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 27 07:52:25 crc kubenswrapper[4787]: E0127 07:52:25.076624 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 27 07:52:25 crc kubenswrapper[4787]: I0127 07:52:25.076733 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 27 07:52:25 crc kubenswrapper[4787]: E0127 07:52:25.076782 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 27 07:52:25 crc kubenswrapper[4787]: E0127 07:52:25.076937 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 27 07:52:25 crc kubenswrapper[4787]: I0127 07:52:25.077250 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-vws75" Jan 27 07:52:25 crc kubenswrapper[4787]: E0127 07:52:25.077668 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-vws75" podUID="3969f21f-ab36-49b4-9a9c-02cf19e65ad0" Jan 27 07:52:25 crc kubenswrapper[4787]: I0127 07:52:25.096348 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-q4fh5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f051e184-acac-47cf-9e04-9df648288715\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cf36b99a3389bdbbca8142539870671f6be61df22503df198f6255937e619950\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zvg7g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8829867d627dfed47b988a93a9ac3e381c0bf59794b65f8e1c1e821838477748\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zvg7g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:59Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-q4fh5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to 
call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:25Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:25 crc kubenswrapper[4787]: I0127 07:52:25.115010 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2974b5846d4b9e95a3eab849f160c5b33393ff6b1bf1275bc6476ec80807d632\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4426d84c4375c7c659c77049bbd6a720bf004fa5e7780b1e8739a2620c3667f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:25Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:25 crc kubenswrapper[4787]: I0127 
07:52:25.115741 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:25 crc kubenswrapper[4787]: I0127 07:52:25.115879 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:25 crc kubenswrapper[4787]: I0127 07:52:25.115965 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:25 crc kubenswrapper[4787]: I0127 07:52:25.116059 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:25 crc kubenswrapper[4787]: I0127 07:52:25.116139 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:25Z","lastTransitionTime":"2026-01-27T07:52:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:25 crc kubenswrapper[4787]: I0127 07:52:25.132006 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1d5a64cdfd5f8aff93294f92aebc9ccf8fa2a4063e347fce2e35604d27651d9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:25Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:25 crc kubenswrapper[4787]: I0127 07:52:25.144367 4787 status_manager.go:875] "Failed to update status for 
pod" pod="openshift-dns/node-resolver-fghc7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6cac1bb0-6b6f-442f-9db4-a25d4f194bb1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://825d098d784c9e43aa1c5b9014dd290f1a23ddf0651dfd603c634682a8f05da2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c8zvj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:59Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-fghc7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:25Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:25 crc kubenswrapper[4787]: I0127 07:52:25.161230 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-rqwfc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fe88a860-6bb6-40ef-8ed7-c16f06fad8d3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f0dc7c2423b05f4eb857a5a6d8a5006cea055965d20a85e84659b369f9659eeb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-scmbf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:52:02Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-rqwfc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:25Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:25 crc kubenswrapper[4787]: I0127 07:52:25.177727 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9bef4a57-904d-47b1-baa1-224c5c9f2b1a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://99c0fb5499a4bed619d003da60f83076e413fc5bda47a12d42770f567deeeec8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c114c2274358e777b240765cca81ea54a2e334002317a86595b7bfec95a11fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c470d3edd878f69c48f1457780557dd56300c0fe69342c479f1922dca856bceb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bba5d04b33d62090867842211184f53bc23cf78b90a4c6709792639b74d1cf0d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:35Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:25Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:25 crc kubenswrapper[4787]: I0127 07:52:25.193401 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"60eef58e-b2eb-43d9-a499-317083a89ca3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://35e90c0899d9ae9ea7cce3f5904955f1f3a80e147efbd766e9ed3b34014f40df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://53dd18ffb0f39abccd8edc121977448f4c491f6be9acee866dde8cbeb7c52ab5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://af5452f3980a8a4614721cb60fffffb3b1f3f5d8d82928249857ed2dd3fb5986\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://215bbc991c576293e40d9ba239e697bfad702383af70483ddf0acea311b4ae28\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://879d855210b326ad3cbb964024c0b48b7373519deae8b974b9f5864416c15ef3\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"message\\\":\\\"le observer\\\\nW0127 07:51:54.984485 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0127 07:51:54.984680 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0127 07:51:54.985504 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2155430209/tls.crt::/tmp/serving-cert-2155430209/tls.key\\\\\\\"\\\\nI0127 07:51:55.207010 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0127 07:51:55.214027 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0127 07:51:55.214056 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0127 07:51:55.214085 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0127 07:51:55.214092 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0127 07:51:55.221404 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0127 07:51:55.221608 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0127 07:51:55.221697 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0127 07:51:55.221768 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0127 07:51:55.221825 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0127 07:51:55.221877 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0127 07:51:55.221926 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0127 07:51:55.221414 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0127 07:51:55.222961 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-27T07:51:49Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f3424e1f77a68fac6a8ccd12e018ce28850817c99e72cef7edbff0941124c46b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c37afd8d2562157729aca3685d7abae2bb6b9e081c2b456706dde875fd762c07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c37afd8d2562157729aca3685d7abae2bb6b9e081c2b456706dde875fd762c07\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:51:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:35Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:25Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:25 crc kubenswrapper[4787]: I0127 07:52:25.210622 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:25Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:25 crc kubenswrapper[4787]: I0127 07:52:25.219084 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:25 crc kubenswrapper[4787]: I0127 07:52:25.219133 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:25 crc kubenswrapper[4787]: I0127 07:52:25.219145 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:25 crc kubenswrapper[4787]: I0127 07:52:25.219170 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:25 crc kubenswrapper[4787]: I0127 07:52:25.219185 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:25Z","lastTransitionTime":"2026-01-27T07:52:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:52:25 crc kubenswrapper[4787]: I0127 07:52:25.226331 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:25Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:25 crc kubenswrapper[4787]: I0127 07:52:25.242388 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-rqjpz" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e6f78168-0b0d-464d-b1c7-00bb9a69c0d1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e73805a73ffd2c2028fa682daaf20e295ecd2f2d7e24bb9b897883f18c3c514e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tz7b2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-rqjpz\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:25Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:25 crc kubenswrapper[4787]: I0127 07:52:25.265208 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-fqb6r" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0ff88955-7cd9-4af4-9e0e-79614a1d2994\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://47c65fabdecb2c24a7eeabdff7a89339f711ff64e9cb6516900be57a2753f89b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5e7833b50fe96981aa0ed63aa03b7078def1c809e60092a901a8b7ebaee78ad8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5e7833b50fe96981aa0ed63aa03b7078def1c809e60092a901a8b7ebaee78ad8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:52:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.
io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a0b5a15ba402c52fc6f3cd5d882f597c90ea8d1ab4e836f0e0998a301126f5a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a0b5a15ba402c52fc6f3cd5d882f597c90ea8d1ab4e836f0e0998a301126f5a6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:52:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:52:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://82bb64425ded7611030cccfa7a439b14e545bf83bc48c762df144eddc7d08487\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://82bb64425ded7611030cccfa7a439b14e545bf83bc48c762df144eddc7d08487\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:52:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:52:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9782b2a339d2dba1498f28ed3f7e6c4c2e747a282465311592f0fdb5863f823\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f9782b2a339d2dba1498f28ed3f7e6c4c2e747a282465311592f0fdb5863f823\\\",\\\"exitCode\\\":0,\\\
"finishedAt\\\":\\\"2026-01-27T07:52:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:52:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c8924a3ef645447151d91c506d105c9760f0dce0c568e9bf37ea76108a6352b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c8924a3ef645447151d91c506d105c9760f0dce0c568e9bf37ea76108a6352b2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:52:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f0da2ec606517cc9e0c2ed1d549c947db2eecb603f9bc9ba8bddbee0aad4710\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0f0da2ec606517cc9e0c2ed1d549c947db2eecb603f9bc9ba8bddbee0aad4710\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:52:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:52:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-fqb6r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has 
expired or is not yet valid: current time 2026-01-27T07:52:25Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:25 crc kubenswrapper[4787]: I0127 07:52:25.288831 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7279787f-4f29-4eae-a213-b7ea5d579b93\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://486c1b541ff6adc6a3cdab348fbdd7fbcf1c202c1a474e5e5db37bbcfb38fe06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://47800c35810b19642ee37e41f20900bef93d843d1d7e2219547dde7bf88cc7dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://85684c09dd546fc9a972aa45da092b22d794a5588140a451becfbe962be82b76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-2
7T07:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0aa5c186d3ea5f41540e7450b9ee5a5bbfe8359762ab9aa219a4e4bb26fb0a94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9717409dfdeb0d26d9a1c28a651d1feddfd4aaa11a9bc7db7206fbf8c6400c22\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b0fb183cca1d2f6f6f5bd9c80107e85fe0f5f39ea985a036ba115836e45d7cc2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b0fb183cca1d2f6f6f5bd9c80107e85fe0f5f39ea985a036ba115836e45d7cc2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:51:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a9bb746c11b2a67e014815e16d6765a1c86a3d2c156cae83853881bdb988507c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a9bb746c11b2a67e014815e16d6765a1c86a3d2c156
cae83853881bdb988507c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://1fa78137ccc23e32f6eb838abf3991ca3d99b8349721f1419c892a7f8795f55f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1fa78137ccc23e32f6eb838abf3991ca3d99b8349721f1419c892a7f8795f55f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:51:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:35Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:25Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:25 crc kubenswrapper[4787]: I0127 07:52:25.306352 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7a18f3e6327858658093c392b8b7de18ec6a1085c08ae31ae2f2dc371555011a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:25Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:25 crc kubenswrapper[4787]: I0127 07:52:25.322338 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:25Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:25 crc kubenswrapper[4787]: I0127 07:52:25.322816 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:25 crc kubenswrapper[4787]: I0127 07:52:25.322864 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:25 crc kubenswrapper[4787]: I0127 07:52:25.322877 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:25 crc kubenswrapper[4787]: I0127 07:52:25.322904 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:25 crc kubenswrapper[4787]: I0127 07:52:25.322920 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:25Z","lastTransitionTime":"2026-01-27T07:52:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:52:25 crc kubenswrapper[4787]: I0127 07:52:25.337713 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-vws75" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3969f21f-ab36-49b4-9a9c-02cf19e65ad0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:13Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:13Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:13Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq2mg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq2mg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:52:13Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-vws75\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:25Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:25 crc kubenswrapper[4787]: I0127 07:52:25.358753 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7642m" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fa44405c-042c-485a-ab6c-912dcd377751\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b5f54aa358ddf7d9fe76fd45c4e3332ba2ef1fa4da77c6f38ae29209c4339a80\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2abb29a77d0081b70495701e68f0a5a9b80656eab59ae8de10fab1450a1df49f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://108cdc389c2cbb9baeb538856e70ea1ca2bce5968eb4097700b3e71e5322258d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0bcc1a1d9f6e5bd0d8cf9b36441460debd839f92291531175eb05db5070136e8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fbc588f712cb0167029fc80f924f52841fb904738bbcf774c53ca15a2642ef9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc1a27dd4120a4eafd6bfbd908f9fdbc80e9b006afafb509399f2b657b129b2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://023dbda584b2664829844328d67f417bacaf8e4934d8ae66ea7b9928e95ac995\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://023dbda584b2664829844328d67f417bacaf8e4934d8ae66ea7b9928e95ac995\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-27T07:52:12Z\\\",\\\"message\\\":\\\"80c50373a5e91f08c5893365bfd5a5040449b1b6585a23 prometheus.io/scheme:https prometheus.io/scrape:true service.alpha.openshift.io/serving-cert-secret-name:serving-cert service.alpha.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168 service.beta.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168] [] [] []},Spec:ServiceSpec{Ports:[]ServicePort{ServicePort{Name:https,Protocol:TCP,Port:443,TargetPort:{0 8443 },NodePort:0,AppProtocol:nil,},},Selector:map[string]string{apiserver: true,},ClusterIP:10.217.4.140,Type:ClusterIP,ExternalIPs:[],SessionAffinity:None,LoadBalancerIP:,LoadBalancerSourceRanges:[],ExternalName:,ExternalTrafficPolicy:,HealthCheckNodePort:0,PublishNotReadyAddresses:false,SessionAffinityConfig:nil,IPFamilyPolicy:*SingleStack,ClusterIPs:[10.217.4.140],IPFamilies:[IPv4],AllocateLoadBalancerNodePorts:nil,LoadBalancerClass:nil,InternalTrafficPolicy:*Cluster,TrafficDistribution:nil,},Status:ServiceStatus{LoadBalancer:LoadBalancerStatus{Ingress:[]LoadBalancerIngress{},},Conditions:[]Condition{},},}\\\\nI0127 07:52:11.779634 6307 services_controller.go:356] Processing sync for service openshift-service-ca-operator/metrics for network=default\\\\nF0127 07:52:11.779647 6307 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network contro\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-27T07:52:10Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-7642m_openshift-ovn-kubernetes(fa44405c-042c-485a-ab6c-912dcd377751)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d46b5c512f7b14d97a53ce0355a7ceb08370d3e91ccef003b40386a9785df413\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://199ad62dee1b7ba9f8af9b2f3fcd77db94e8ba7dbfe2d011610c47358c17126b\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://199ad62dee1b7ba9f8af9b2f3fcd77db94e8ba7dbfe2d011610c47358c17126b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:52:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:59Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-7642m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:25Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:25 crc kubenswrapper[4787]: I0127 07:52:25.373948 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-vhss5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"643aaef9-e302-436b-943e-940480ef74fc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5cda08199034af06aa5966ebd72a8a553fe0c83acf73bbd42f4f7d6bb121b2e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8q989
\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ea42ae75a943bc89609995381bc82366333131d7c7200a3ab758b9de239e3283\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8q989\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:52:12Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-vhss5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:25Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:25 crc kubenswrapper[4787]: I0127 07:52:25.425458 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:25 crc kubenswrapper[4787]: I0127 07:52:25.425512 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:25 crc kubenswrapper[4787]: I0127 07:52:25.425525 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:25 crc kubenswrapper[4787]: I0127 07:52:25.425569 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:25 crc kubenswrapper[4787]: I0127 07:52:25.425585 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:25Z","lastTransitionTime":"2026-01-27T07:52:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:52:25 crc kubenswrapper[4787]: I0127 07:52:25.528808 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:25 crc kubenswrapper[4787]: I0127 07:52:25.528875 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:25 crc kubenswrapper[4787]: I0127 07:52:25.528893 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:25 crc kubenswrapper[4787]: I0127 07:52:25.528915 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:25 crc kubenswrapper[4787]: I0127 07:52:25.528929 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:25Z","lastTransitionTime":"2026-01-27T07:52:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:25 crc kubenswrapper[4787]: I0127 07:52:25.632645 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:25 crc kubenswrapper[4787]: I0127 07:52:25.632768 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:25 crc kubenswrapper[4787]: I0127 07:52:25.632789 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:25 crc kubenswrapper[4787]: I0127 07:52:25.632819 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:25 crc kubenswrapper[4787]: I0127 07:52:25.632841 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:25Z","lastTransitionTime":"2026-01-27T07:52:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:25 crc kubenswrapper[4787]: I0127 07:52:25.736543 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:25 crc kubenswrapper[4787]: I0127 07:52:25.736634 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:25 crc kubenswrapper[4787]: I0127 07:52:25.736647 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:25 crc kubenswrapper[4787]: I0127 07:52:25.736663 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:25 crc kubenswrapper[4787]: I0127 07:52:25.736674 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:25Z","lastTransitionTime":"2026-01-27T07:52:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:52:25 crc kubenswrapper[4787]: I0127 07:52:25.839515 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:25 crc kubenswrapper[4787]: I0127 07:52:25.839631 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:25 crc kubenswrapper[4787]: I0127 07:52:25.839658 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:25 crc kubenswrapper[4787]: I0127 07:52:25.839695 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:25 crc kubenswrapper[4787]: I0127 07:52:25.839720 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:25Z","lastTransitionTime":"2026-01-27T07:52:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:25 crc kubenswrapper[4787]: I0127 07:52:25.943542 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:25 crc kubenswrapper[4787]: I0127 07:52:25.943641 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:25 crc kubenswrapper[4787]: I0127 07:52:25.943663 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:25 crc kubenswrapper[4787]: I0127 07:52:25.943693 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:25 crc kubenswrapper[4787]: I0127 07:52:25.943713 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:25Z","lastTransitionTime":"2026-01-27T07:52:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:26 crc kubenswrapper[4787]: I0127 07:52:26.047288 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:26 crc kubenswrapper[4787]: I0127 07:52:26.047376 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:26 crc kubenswrapper[4787]: I0127 07:52:26.047398 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:26 crc kubenswrapper[4787]: I0127 07:52:26.047429 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:26 crc kubenswrapper[4787]: I0127 07:52:26.047448 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:26Z","lastTransitionTime":"2026-01-27T07:52:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:52:26 crc kubenswrapper[4787]: I0127 07:52:26.058852 4787 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-31 09:51:59.391602252 +0000 UTC Jan 27 07:52:26 crc kubenswrapper[4787]: I0127 07:52:26.149795 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:26 crc kubenswrapper[4787]: I0127 07:52:26.149843 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:26 crc kubenswrapper[4787]: I0127 07:52:26.149857 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:26 crc kubenswrapper[4787]: I0127 07:52:26.149877 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:26 crc kubenswrapper[4787]: I0127 07:52:26.149892 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:26Z","lastTransitionTime":"2026-01-27T07:52:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:26 crc kubenswrapper[4787]: I0127 07:52:26.253369 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:26 crc kubenswrapper[4787]: I0127 07:52:26.253421 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:26 crc kubenswrapper[4787]: I0127 07:52:26.253437 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:26 crc kubenswrapper[4787]: I0127 07:52:26.253467 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:26 crc kubenswrapper[4787]: I0127 07:52:26.253483 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:26Z","lastTransitionTime":"2026-01-27T07:52:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:52:26 crc kubenswrapper[4787]: I0127 07:52:26.357873 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:26 crc kubenswrapper[4787]: I0127 07:52:26.357947 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:26 crc kubenswrapper[4787]: I0127 07:52:26.357969 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:26 crc kubenswrapper[4787]: I0127 07:52:26.358004 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:26 crc kubenswrapper[4787]: I0127 07:52:26.358028 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:26Z","lastTransitionTime":"2026-01-27T07:52:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:26 crc kubenswrapper[4787]: I0127 07:52:26.461378 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:26 crc kubenswrapper[4787]: I0127 07:52:26.461454 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:26 crc kubenswrapper[4787]: I0127 07:52:26.461475 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:26 crc kubenswrapper[4787]: I0127 07:52:26.461505 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:26 crc kubenswrapper[4787]: I0127 07:52:26.461526 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:26Z","lastTransitionTime":"2026-01-27T07:52:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:26 crc kubenswrapper[4787]: I0127 07:52:26.564185 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:26 crc kubenswrapper[4787]: I0127 07:52:26.564246 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:26 crc kubenswrapper[4787]: I0127 07:52:26.564263 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:26 crc kubenswrapper[4787]: I0127 07:52:26.564283 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:26 crc kubenswrapper[4787]: I0127 07:52:26.564302 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:26Z","lastTransitionTime":"2026-01-27T07:52:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:52:26 crc kubenswrapper[4787]: I0127 07:52:26.667656 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:26 crc kubenswrapper[4787]: I0127 07:52:26.667702 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:26 crc kubenswrapper[4787]: I0127 07:52:26.667715 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:26 crc kubenswrapper[4787]: I0127 07:52:26.667733 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:26 crc kubenswrapper[4787]: I0127 07:52:26.667748 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:26Z","lastTransitionTime":"2026-01-27T07:52:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:26 crc kubenswrapper[4787]: I0127 07:52:26.770428 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:26 crc kubenswrapper[4787]: I0127 07:52:26.770478 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:26 crc kubenswrapper[4787]: I0127 07:52:26.770490 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:26 crc kubenswrapper[4787]: I0127 07:52:26.770505 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:26 crc kubenswrapper[4787]: I0127 07:52:26.770517 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:26Z","lastTransitionTime":"2026-01-27T07:52:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:26 crc kubenswrapper[4787]: I0127 07:52:26.856352 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 27 07:52:26 crc kubenswrapper[4787]: E0127 07:52:26.856611 4787 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-27 07:52:58.856580423 +0000 UTC m=+84.508935915 (durationBeforeRetry 32s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 27 07:52:26 crc kubenswrapper[4787]: I0127 07:52:26.856697 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 27 07:52:26 crc kubenswrapper[4787]: I0127 07:52:26.856759 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 27 07:52:26 crc kubenswrapper[4787]: I0127 07:52:26.856821 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 27 07:52:26 crc kubenswrapper[4787]: I0127 07:52:26.856866 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 27 07:52:26 crc kubenswrapper[4787]: E0127 07:52:26.856966 4787 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Jan 27 07:52:26 crc kubenswrapper[4787]: E0127 07:52:26.857001 4787 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 27 07:52:26 crc kubenswrapper[4787]: E0127 07:52:26.856999 4787 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 27 07:52:26 crc kubenswrapper[4787]: E0127 07:52:26.857008 4787 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-27 07:52:58.856998381 +0000 UTC m=+84.509353873 (durationBeforeRetry 32s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Jan 27 07:52:26 crc kubenswrapper[4787]: E0127 07:52:26.857059 4787 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-27 07:52:58.857048902 +0000 UTC m=+84.509404394 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 27 07:52:26 crc kubenswrapper[4787]: E0127 07:52:26.857063 4787 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 27 07:52:26 crc kubenswrapper[4787]: E0127 07:52:26.857097 4787 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 27 07:52:26 crc kubenswrapper[4787]: E0127 07:52:26.857196 4787 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 27 07:52:26 crc kubenswrapper[4787]: E0127 07:52:26.857274 4787 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 27 07:52:26 crc kubenswrapper[4787]: E0127 07:52:26.857299 4787 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 27 07:52:26 crc kubenswrapper[4787]: E0127 07:52:26.857231 4787 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-01-27 07:52:58.857192565 +0000 UTC m=+84.509548087 (durationBeforeRetry 32s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 27 07:52:26 crc kubenswrapper[4787]: E0127 07:52:26.857420 4787 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-01-27 07:52:58.85737774 +0000 UTC m=+84.509733442 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 27 07:52:26 crc kubenswrapper[4787]: I0127 07:52:26.873267 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:26 crc kubenswrapper[4787]: I0127 07:52:26.873329 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:26 crc kubenswrapper[4787]: I0127 07:52:26.873340 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:26 crc kubenswrapper[4787]: I0127 07:52:26.873357 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:26 crc kubenswrapper[4787]: I0127 07:52:26.873369 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:26Z","lastTransitionTime":"2026-01-27T07:52:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:26 crc kubenswrapper[4787]: I0127 07:52:26.977474 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:26 crc kubenswrapper[4787]: I0127 07:52:26.977531 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:26 crc kubenswrapper[4787]: I0127 07:52:26.977541 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:26 crc kubenswrapper[4787]: I0127 07:52:26.977574 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:26 crc kubenswrapper[4787]: I0127 07:52:26.977586 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:26Z","lastTransitionTime":"2026-01-27T07:52:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:52:27 crc kubenswrapper[4787]: I0127 07:52:27.059354 4787 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-07 08:36:39.106737923 +0000 UTC Jan 27 07:52:27 crc kubenswrapper[4787]: I0127 07:52:27.076280 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 27 07:52:27 crc kubenswrapper[4787]: I0127 07:52:27.076297 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 27 07:52:27 crc kubenswrapper[4787]: I0127 07:52:27.076457 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 27 07:52:27 crc kubenswrapper[4787]: I0127 07:52:27.077336 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-vws75" Jan 27 07:52:27 crc kubenswrapper[4787]: E0127 07:52:27.077628 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 27 07:52:27 crc kubenswrapper[4787]: E0127 07:52:27.077861 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 27 07:52:27 crc kubenswrapper[4787]: E0127 07:52:27.078132 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 27 07:52:27 crc kubenswrapper[4787]: E0127 07:52:27.078278 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-vws75" podUID="3969f21f-ab36-49b4-9a9c-02cf19e65ad0" Jan 27 07:52:27 crc kubenswrapper[4787]: I0127 07:52:27.083489 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:27 crc kubenswrapper[4787]: I0127 07:52:27.083529 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:27 crc kubenswrapper[4787]: I0127 07:52:27.083540 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:27 crc kubenswrapper[4787]: I0127 07:52:27.083570 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:27 crc kubenswrapper[4787]: I0127 07:52:27.083580 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:27Z","lastTransitionTime":"2026-01-27T07:52:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:27 crc kubenswrapper[4787]: I0127 07:52:27.154601 4787 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Jan 27 07:52:27 crc kubenswrapper[4787]: I0127 07:52:27.171622 4787 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler/openshift-kube-scheduler-crc"] Jan 27 07:52:27 crc kubenswrapper[4787]: I0127 07:52:27.173049 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9bef4a57-904d-47b1-baa1-224c5c9f2b1a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://99c0fb5499a4bed619d003da60f83076e413fc5bda47a12d42770f567deeeec8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c114c2274358e777b240765cca81ea54a2e334002317a86595b7bfec95a11fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c470d3edd878f69c48f1457780557dd56300c0fe69342c479f1922dca856bceb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bba5d04b33d62090867842211184f53bc23cf78b90a4c6709792639b74d1cf0d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:35Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:27Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:27 crc kubenswrapper[4787]: I0127 07:52:27.187015 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:27 crc kubenswrapper[4787]: I0127 07:52:27.187088 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:27 crc kubenswrapper[4787]: I0127 07:52:27.187103 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:27 crc kubenswrapper[4787]: I0127 07:52:27.187123 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:27 crc kubenswrapper[4787]: I0127 07:52:27.187155 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:27Z","lastTransitionTime":"2026-01-27T07:52:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:52:27 crc kubenswrapper[4787]: I0127 07:52:27.190350 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"60eef58e-b2eb-43d9-a499-317083a89ca3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://35e90c0899d9ae9ea7cce3f5904955f1f3a80e147efbd766e9ed3b34014f40df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://53dd18ffb0f39abccd8edc121977448f4c491f6be9acee866dde8cbeb7c52ab5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://af5452f3980a8a4614721cb60fffffb3b1f3f5d8d82928249857ed2dd3fb5986\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartC
ount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://215bbc991c576293e40d9ba239e697bfad702383af70483ddf0acea311b4ae28\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://879d855210b326ad3cbb964024c0b48b7373519deae8b974b9f5864416c15ef3\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"message\\\":\\\"le observer\\\\nW0127 07:51:54.984485 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0127 07:51:54.984680 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0127 07:51:54.985504 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2155430209/tls.crt::/tmp/serving-cert-2155430209/tls.key\\\\\\\"\\\\nI0127 07:51:55.207010 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0127 07:51:55.214027 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0127 07:51:55.214056 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0127 07:51:55.214085 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0127 07:51:55.214092 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0127 07:51:55.221404 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0127 07:51:55.221608 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0127 07:51:55.221697 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0127 07:51:55.221768 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0127 07:51:55.221825 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0127 07:51:55.221877 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0127 07:51:55.221926 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0127 07:51:55.221414 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0127 07:51:55.222961 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-27T07:51:49Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f3424e1f77a68fac6a8ccd12e018ce28850817c99e72cef7edbff0941124c46b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c37afd8d2562157729aca3685d7abae2bb6b9e081c2b456706dde875fd762c07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c37afd8d2562157729aca3685d7abae2bb6b9e081c2b456706dde875fd762c07\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:51:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:35Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:27Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:27 crc kubenswrapper[4787]: I0127 07:52:27.206533 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:27Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:27 crc kubenswrapper[4787]: I0127 07:52:27.219272 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-fghc7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6cac1bb0-6b6f-442f-9db4-a25d4f194bb1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://825d098d784c9e43aa1c5b9014dd290f1a23ddf0651dfd603c634682a8f05da2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c8zvj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:59Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-fghc7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:27Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:27 crc kubenswrapper[4787]: I0127 07:52:27.231633 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-rqwfc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fe88a860-6bb6-40ef-8ed7-c16f06fad8d3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f0dc7c2423b05f4eb857a5a6d8a5006cea055965d20a85e84659b369f9659eeb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-scmbf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:52:02Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-rqwfc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:27Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:27 crc kubenswrapper[4787]: I0127 07:52:27.250311 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-fqb6r" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0ff88955-7cd9-4af4-9e0e-79614a1d2994\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://47c65fabdecb2c24a7eeabdff7a89339f711ff64e9cb6516900be57a2753f89b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5e7833b50fe96981aa0ed63aa03b7078def1c809e60092a901a8b7ebaee78ad8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5e7833b50fe96981aa0ed63aa03b7078def1c809e60092a901a8b7ebaee78ad8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:52:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a0b5a15ba402c52fc6f3cd5d882f597c90ea8d1ab4e836f0e0998a301126f5a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a0b5a15ba402c52fc6f3cd5d882f597c90ea8d1ab4e836f0e0998a301126f5a6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:52:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:52:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://82bb64425ded7611030cccfa7a439b14e545bf83bc48c762df144eddc7d08487\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://82bb64425ded7611030cccfa7a439b14e545bf83bc48c762df144eddc7d08487\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:52:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:52:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9782b2a339d2dba1498f28ed3f7e6c4c2e747a282465311592f0fdb5863f823\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f9782b2a339d2dba1498f28ed3f7e6c4c2e747a282465311592f0fdb5863f823\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:52:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:52:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c8924a3ef645447151d91c506d105c9760f0dce0c568e9bf37ea76108a6352b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c8924a3ef645447151d91c506d105c9760f0dce0c568e9bf37ea76108a6352b2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:52:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f0da2ec606517cc9e0c2ed1d549c947db2eecb603f9bc9ba8bddbee0aad4710\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0f0da2ec606517cc9e0c2ed1d549c947db2eecb603f9bc9ba8bddbee0aad4710\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:52:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:52:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-fqb6r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:27Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:27 crc kubenswrapper[4787]: I0127 07:52:27.276538 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7279787f-4f29-4eae-a213-b7ea5d579b93\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://486c1b541ff6adc6a3cdab348fbdd7fbcf1c202c1a474e5e5db37bbcfb38fe06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://47800c35810b19642ee37e41f20900bef93d843d1d7e2219547dde7bf88cc7dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://85684c09dd546fc9a972aa45da092b22d794a5588140a451becfbe962be82b76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0aa5c186d3ea5f41540e7450b9ee5a5bbfe8359
762ab9aa219a4e4bb26fb0a94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9717409dfdeb0d26d9a1c28a651d1feddfd4aaa11a9bc7db7206fbf8c6400c22\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b0fb183cca1d2f6f6f5bd9c80107e85fe0f5f39ea985a036ba115836e45d7cc2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b0fb183cca1d2f6f6f5bd9c80107e85fe0f5f39ea985a036ba115836e45d7cc2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:51:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a9bb746c11b2a67e014815e16d6765a1c86a3d2c156cae83853881bdb988507c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a9bb746c11b2a67e014815e16d6765a1c86a3d2c156cae83853881bdb988507c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://1fa78137ccc23e32f6eb838abf3991ca3d99b8349721f1419c892a7f8795f55f\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1fa78137ccc23e32f6eb838abf3991ca3d99b8349721f1419c892a7f8795f55f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:51:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:35Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:27Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:27 crc kubenswrapper[4787]: I0127 07:52:27.290159 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:27 crc kubenswrapper[4787]: I0127 07:52:27.290218 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:27 crc kubenswrapper[4787]: I0127 07:52:27.290231 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:27 crc kubenswrapper[4787]: I0127 07:52:27.290252 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:27 crc kubenswrapper[4787]: I0127 07:52:27.290272 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:27Z","lastTransitionTime":"2026-01-27T07:52:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:52:27 crc kubenswrapper[4787]: I0127 07:52:27.293267 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7a18f3e6327858658093c392b8b7de18ec6a1085c08ae31ae2f2dc371555011a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:27Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:27 crc kubenswrapper[4787]: I0127 07:52:27.312508 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:27Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:27 crc kubenswrapper[4787]: I0127 07:52:27.331683 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:27Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:27 crc kubenswrapper[4787]: I0127 07:52:27.351349 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-rqjpz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e6f78168-0b0d-464d-b1c7-00bb9a69c0d1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e73805a73ffd2c2028fa682daaf20e295ecd2f2d7e24bb9b897883f18c3c514e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mo
untPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tz7b2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-rqjpz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:27Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:27 crc kubenswrapper[4787]: I0127 07:52:27.385442 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7642m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fa44405c-042c-485a-ab6c-912dcd377751\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b5f54aa358ddf7d9fe76fd45c4e3332ba2ef1fa4da77c6f38ae29209c4339a80\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2abb29a77d0081b70495701e68f0a5a9b80656eab59ae8de10fab1450a1df49f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://108cdc389c2cbb9baeb538856e70ea1ca2bce5968eb4097700b3e71e5322258d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0bcc1a1d9f6e5bd0d8cf9b36441460debd839f92291531175eb05db5070136e8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fbc588f712cb0167029fc80f924f52841fb904738bbcf774c53ca15a2642ef9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc1a27dd4120a4eafd6bfbd908f9fdbc80e9b006afafb509399f2b657b129b2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://023dbda584b2664829844328d67f417bacaf8e49
34d8ae66ea7b9928e95ac995\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://023dbda584b2664829844328d67f417bacaf8e4934d8ae66ea7b9928e95ac995\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-27T07:52:12Z\\\",\\\"message\\\":\\\"80c50373a5e91f08c5893365bfd5a5040449b1b6585a23 prometheus.io/scheme:https prometheus.io/scrape:true service.alpha.openshift.io/serving-cert-secret-name:serving-cert service.alpha.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168 service.beta.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168] [] [] []},Spec:ServiceSpec{Ports:[]ServicePort{ServicePort{Name:https,Protocol:TCP,Port:443,TargetPort:{0 8443 },NodePort:0,AppProtocol:nil,},},Selector:map[string]string{apiserver: true,},ClusterIP:10.217.4.140,Type:ClusterIP,ExternalIPs:[],SessionAffinity:None,LoadBalancerIP:,LoadBalancerSourceRanges:[],ExternalName:,ExternalTrafficPolicy:,HealthCheckNodePort:0,PublishNotReadyAddresses:false,SessionAffinityConfig:nil,IPFamilyPolicy:*SingleStack,ClusterIPs:[10.217.4.140],IPFamilies:[IPv4],AllocateLoadBalancerNodePorts:nil,LoadBalancerClass:nil,InternalTrafficPolicy:*Cluster,TrafficDistribution:nil,},Status:ServiceStatus{LoadBalancer:LoadBalancerStatus{Ingress:[]LoadBalancerIngress{},},Conditions:[]Condition{},},}\\\\nI0127 07:52:11.779634 6307 services_controller.go:356] Processing sync for service openshift-service-ca-operator/metrics for network=default\\\\nF0127 07:52:11.779647 6307 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network contro\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-27T07:52:10Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-7642m_openshift-ovn-kubernetes(fa44405c-042c-485a-ab6c-912dcd377751)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d46b5c512f7b14d97a53ce0355a7ceb08370d3e91ccef003b40386a9785df413\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://199ad62dee1b7ba9f8af9b2f3fcd77db94e8ba7dbfe2d011610c47358c17126b\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://199ad62dee1b7ba9f8af9b2f3fcd77db94e8ba7dbfe2d011610c47358c17126b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:52:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:59Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-7642m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:27Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:27 crc kubenswrapper[4787]: I0127 07:52:27.393411 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:27 crc kubenswrapper[4787]: I0127 07:52:27.393464 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:27 crc kubenswrapper[4787]: I0127 07:52:27.393475 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:27 crc kubenswrapper[4787]: I0127 07:52:27.393496 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:27 crc kubenswrapper[4787]: I0127 07:52:27.393507 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:27Z","lastTransitionTime":"2026-01-27T07:52:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:52:27 crc kubenswrapper[4787]: I0127 07:52:27.398706 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-vhss5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"643aaef9-e302-436b-943e-940480ef74fc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5cda08199034af06aa5966ebd72a8a553fe0c83acf73bbd42f4f7d6bb121b2e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8q989\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ea42ae75a943bc89609995381bc82366333131d7c7200a3ab758b9de239e3283\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8q989\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:52:12Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-vhss5\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:27Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:27 crc kubenswrapper[4787]: I0127 07:52:27.412849 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-vws75" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3969f21f-ab36-49b4-9a9c-02cf19e65ad0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:13Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:13Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:13Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq2mg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq2mg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:52:13Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-vws75\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:27Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:27 crc 
kubenswrapper[4787]: I0127 07:52:27.437378 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2974b5846d4b9e95a3eab849f160c5b33393ff6b1bf1275bc6476ec80807d632\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4426d84c4375c7c659c77049bbd6a720bf004fa5e7780b1e8739a2620c3667f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:27Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:27 crc kubenswrapper[4787]: I0127 07:52:27.458295 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1d5a64cdfd5f8aff93294f92aebc9ccf8fa2a4063e347fce2e35604d27651d9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:27Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:27 crc kubenswrapper[4787]: I0127 07:52:27.476522 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-q4fh5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f051e184-acac-47cf-9e04-9df648288715\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cf36b99a3389bdbbca8142539870671f6be61df22503df198f6255937e619950\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zvg7g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8829867d627dfed47b988a93a9ac3e381c0bf59794b65f8e1c1e821838477748\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zvg7g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:59Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-q4fh5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:27Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:27 crc kubenswrapper[4787]: I0127 07:52:27.496043 4787 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:27 crc kubenswrapper[4787]: I0127 07:52:27.496107 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:27 crc kubenswrapper[4787]: I0127 07:52:27.496125 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:27 crc kubenswrapper[4787]: I0127 07:52:27.496150 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:27 crc kubenswrapper[4787]: I0127 07:52:27.496170 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:27Z","lastTransitionTime":"2026-01-27T07:52:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:27 crc kubenswrapper[4787]: I0127 07:52:27.599411 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:27 crc kubenswrapper[4787]: I0127 07:52:27.599473 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:27 crc kubenswrapper[4787]: I0127 07:52:27.599486 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:27 crc kubenswrapper[4787]: I0127 07:52:27.599511 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:27 crc kubenswrapper[4787]: I0127 07:52:27.599529 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:27Z","lastTransitionTime":"2026-01-27T07:52:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:27 crc kubenswrapper[4787]: I0127 07:52:27.702397 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:27 crc kubenswrapper[4787]: I0127 07:52:27.702458 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:27 crc kubenswrapper[4787]: I0127 07:52:27.702476 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:27 crc kubenswrapper[4787]: I0127 07:52:27.702501 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:27 crc kubenswrapper[4787]: I0127 07:52:27.702519 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:27Z","lastTransitionTime":"2026-01-27T07:52:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:52:27 crc kubenswrapper[4787]: I0127 07:52:27.806043 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:27 crc kubenswrapper[4787]: I0127 07:52:27.806113 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:27 crc kubenswrapper[4787]: I0127 07:52:27.806139 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:27 crc kubenswrapper[4787]: I0127 07:52:27.806172 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:27 crc kubenswrapper[4787]: I0127 07:52:27.806192 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:27Z","lastTransitionTime":"2026-01-27T07:52:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:27 crc kubenswrapper[4787]: I0127 07:52:27.909615 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:27 crc kubenswrapper[4787]: I0127 07:52:27.909691 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:27 crc kubenswrapper[4787]: I0127 07:52:27.909701 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:27 crc kubenswrapper[4787]: I0127 07:52:27.909738 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:27 crc kubenswrapper[4787]: I0127 07:52:27.909749 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:27Z","lastTransitionTime":"2026-01-27T07:52:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:28 crc kubenswrapper[4787]: I0127 07:52:28.013692 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:28 crc kubenswrapper[4787]: I0127 07:52:28.013824 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:28 crc kubenswrapper[4787]: I0127 07:52:28.013845 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:28 crc kubenswrapper[4787]: I0127 07:52:28.013869 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:28 crc kubenswrapper[4787]: I0127 07:52:28.013888 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:28Z","lastTransitionTime":"2026-01-27T07:52:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:52:28 crc kubenswrapper[4787]: I0127 07:52:28.059725 4787 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-29 17:48:35.479976623 +0000 UTC Jan 27 07:52:28 crc kubenswrapper[4787]: I0127 07:52:28.117408 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:28 crc kubenswrapper[4787]: I0127 07:52:28.117483 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:28 crc kubenswrapper[4787]: I0127 07:52:28.117508 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:28 crc kubenswrapper[4787]: I0127 07:52:28.117540 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:28 crc kubenswrapper[4787]: I0127 07:52:28.117593 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:28Z","lastTransitionTime":"2026-01-27T07:52:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:28 crc kubenswrapper[4787]: I0127 07:52:28.220095 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:28 crc kubenswrapper[4787]: I0127 07:52:28.220139 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:28 crc kubenswrapper[4787]: I0127 07:52:28.220151 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:28 crc kubenswrapper[4787]: I0127 07:52:28.220168 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:28 crc kubenswrapper[4787]: I0127 07:52:28.220180 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:28Z","lastTransitionTime":"2026-01-27T07:52:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:52:28 crc kubenswrapper[4787]: I0127 07:52:28.323392 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:28 crc kubenswrapper[4787]: I0127 07:52:28.324023 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:28 crc kubenswrapper[4787]: I0127 07:52:28.324189 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:28 crc kubenswrapper[4787]: I0127 07:52:28.324352 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:28 crc kubenswrapper[4787]: I0127 07:52:28.324498 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:28Z","lastTransitionTime":"2026-01-27T07:52:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:28 crc kubenswrapper[4787]: I0127 07:52:28.428485 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:28 crc kubenswrapper[4787]: I0127 07:52:28.428545 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:28 crc kubenswrapper[4787]: I0127 07:52:28.428592 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:28 crc kubenswrapper[4787]: I0127 07:52:28.428620 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:28 crc kubenswrapper[4787]: I0127 07:52:28.428643 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:28Z","lastTransitionTime":"2026-01-27T07:52:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:28 crc kubenswrapper[4787]: I0127 07:52:28.532931 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:28 crc kubenswrapper[4787]: I0127 07:52:28.533067 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:28 crc kubenswrapper[4787]: I0127 07:52:28.533095 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:28 crc kubenswrapper[4787]: I0127 07:52:28.533128 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:28 crc kubenswrapper[4787]: I0127 07:52:28.533152 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:28Z","lastTransitionTime":"2026-01-27T07:52:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:52:28 crc kubenswrapper[4787]: I0127 07:52:28.637214 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:28 crc kubenswrapper[4787]: I0127 07:52:28.637289 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:28 crc kubenswrapper[4787]: I0127 07:52:28.637301 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:28 crc kubenswrapper[4787]: I0127 07:52:28.637323 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:28 crc kubenswrapper[4787]: I0127 07:52:28.637336 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:28Z","lastTransitionTime":"2026-01-27T07:52:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:28 crc kubenswrapper[4787]: I0127 07:52:28.741765 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:28 crc kubenswrapper[4787]: I0127 07:52:28.741838 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:28 crc kubenswrapper[4787]: I0127 07:52:28.741858 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:28 crc kubenswrapper[4787]: I0127 07:52:28.741889 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:28 crc kubenswrapper[4787]: I0127 07:52:28.741909 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:28Z","lastTransitionTime":"2026-01-27T07:52:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:28 crc kubenswrapper[4787]: I0127 07:52:28.845701 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:28 crc kubenswrapper[4787]: I0127 07:52:28.845783 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:28 crc kubenswrapper[4787]: I0127 07:52:28.845806 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:28 crc kubenswrapper[4787]: I0127 07:52:28.845833 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:28 crc kubenswrapper[4787]: I0127 07:52:28.845848 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:28Z","lastTransitionTime":"2026-01-27T07:52:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:52:28 crc kubenswrapper[4787]: I0127 07:52:28.951225 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:28 crc kubenswrapper[4787]: I0127 07:52:28.951347 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:28 crc kubenswrapper[4787]: I0127 07:52:28.951422 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:28 crc kubenswrapper[4787]: I0127 07:52:28.951621 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:28 crc kubenswrapper[4787]: I0127 07:52:28.951657 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:28Z","lastTransitionTime":"2026-01-27T07:52:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:29 crc kubenswrapper[4787]: I0127 07:52:29.056040 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:29 crc kubenswrapper[4787]: I0127 07:52:29.056129 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:29 crc kubenswrapper[4787]: I0127 07:52:29.056156 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:29 crc kubenswrapper[4787]: I0127 07:52:29.056187 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:29 crc kubenswrapper[4787]: I0127 07:52:29.056209 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:29Z","lastTransitionTime":"2026-01-27T07:52:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:29 crc kubenswrapper[4787]: I0127 07:52:29.060422 4787 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-01 16:18:52.859487495 +0000 UTC Jan 27 07:52:29 crc kubenswrapper[4787]: I0127 07:52:29.075647 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 27 07:52:29 crc kubenswrapper[4787]: I0127 07:52:29.075696 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 27 07:52:29 crc kubenswrapper[4787]: I0127 07:52:29.075742 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-vws75" Jan 27 07:52:29 crc kubenswrapper[4787]: I0127 07:52:29.075808 4787 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 27 07:52:29 crc kubenswrapper[4787]: E0127 07:52:29.076172 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 27 07:52:29 crc kubenswrapper[4787]: E0127 07:52:29.076301 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-vws75" podUID="3969f21f-ab36-49b4-9a9c-02cf19e65ad0" Jan 27 07:52:29 crc kubenswrapper[4787]: E0127 07:52:29.076400 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 27 07:52:29 crc kubenswrapper[4787]: E0127 07:52:29.077241 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 27 07:52:29 crc kubenswrapper[4787]: I0127 07:52:29.077957 4787 scope.go:117] "RemoveContainer" containerID="023dbda584b2664829844328d67f417bacaf8e4934d8ae66ea7b9928e95ac995" Jan 27 07:52:29 crc kubenswrapper[4787]: I0127 07:52:29.085259 4787 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 27 07:52:29 crc kubenswrapper[4787]: I0127 07:52:29.105596 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9bef4a57-904d-47b1-baa1-224c5c9f2b1a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://99c0fb5499a4bed619d003da60f83076e413fc5bda47a12d42770f567deeeec8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c114c2274358e777b240765cca81ea54a2e334002317a86595b7bfec95a11fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c470d3edd878f69c48f1457780557dd56300c0fe69342c479f1922dca856bceb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-m
anager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bba5d04b33d62090867842211184f53bc23cf78b90a4c6709792639b74d1cf0d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:35Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:29Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:29 crc kubenswrapper[4787]: I0127 07:52:29.126970 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"60eef58e-b2eb-43d9-a499-317083a89ca3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://35e90c0899d9ae9ea7cce3f5904955f1f3a80e147efbd766e9ed3b34014f40df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://53dd18ffb0f39abccd8edc121977448f4c491f6be9acee866dde8cbeb7c52ab5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://af5452f3980a8a4614721cb60fffffb3b1f3f5d8d82928249857ed2dd3fb5986\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://215bbc991c576293e40d9ba239e697bfad702383af70483ddf0acea311b4ae28\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://879d855210b326ad3cbb964024c0b48b7373519deae8b974b9f5864416c15ef3\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"message\\\":\\\"le observer\\\\nW0127 07:51:54.984485 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0127 07:51:54.984680 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0127 07:51:54.985504 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2155430209/tls.crt::/tmp/serving-cert-2155430209/tls.key\\\\\\\"\\\\nI0127 07:51:55.207010 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0127 07:51:55.214027 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0127 07:51:55.214056 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0127 07:51:55.214085 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0127 07:51:55.214092 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0127 07:51:55.221404 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0127 07:51:55.221608 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0127 07:51:55.221697 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0127 07:51:55.221768 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0127 07:51:55.221825 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0127 07:51:55.221877 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0127 07:51:55.221926 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0127 07:51:55.221414 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0127 07:51:55.222961 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-27T07:51:49Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f3424e1f77a68fac6a8ccd12e018ce28850817c99e72cef7edbff0941124c46b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c37afd8d2562157729aca3685d7abae2bb6b9e081c2b456706dde875fd762c07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c37afd8d2562157729aca3685d7abae2bb6b9e081c2b456706dde875fd762c07\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:51:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:35Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:29Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:29 crc kubenswrapper[4787]: I0127 07:52:29.148845 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:29Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:29 crc kubenswrapper[4787]: I0127 07:52:29.161616 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:29 crc kubenswrapper[4787]: I0127 07:52:29.161698 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:29 crc kubenswrapper[4787]: I0127 07:52:29.161750 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:29 crc kubenswrapper[4787]: I0127 07:52:29.161796 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:29 crc kubenswrapper[4787]: I0127 07:52:29.161856 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:29Z","lastTransitionTime":"2026-01-27T07:52:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:52:29 crc kubenswrapper[4787]: I0127 07:52:29.164894 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-fghc7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6cac1bb0-6b6f-442f-9db4-a25d4f194bb1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://825d098d784c9e43aa1c5b9014dd290f1a23ddf0651dfd603c634682a8f05da2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c8zvj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:59Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-fghc7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:29Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:29 crc kubenswrapper[4787]: I0127 07:52:29.184813 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-rqwfc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fe88a860-6bb6-40ef-8ed7-c16f06fad8d3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f0dc7c2423b05f4eb857a5a6d8a5006cea055965d20a85e84659b369f9659eeb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-scmbf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:52:02Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-rqwfc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:29Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:29 crc kubenswrapper[4787]: I0127 07:52:29.204035 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-fqb6r" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0ff88955-7cd9-4af4-9e0e-79614a1d2994\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://47c65fabdecb2c24a7eeabdff7a89339f711ff64e9cb6516900be57a2753f89b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5e7833b50fe96981aa0ed63aa03b7078def1c809e60092a901a8b7ebaee78ad8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5e7833b50fe96981aa0ed63aa03b7078def1c809e60092a901a8b7ebaee78ad8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:52:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a0b5a15ba402c52fc6f3cd5d882f597c90ea8d1ab4e836f0e0998a301126f5a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a0b5a15ba402c52fc6f3cd5d882f597c90ea8d1ab4e836f0e0998a301126f5a6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:52:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:52:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://82bb64425ded7611030cccfa7a439b14e545bf83bc48c762df144eddc7d08487\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://82bb64425ded7611030cccfa7a439b14e545bf83bc48c762df144eddc7d08487\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:52:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:52:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9782b2a339d2dba1498f28ed3f7e6c4c2e747a282465311592f0fdb5863f823\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f9782b2a339d2dba1498f28ed3f7e6c4c2e747a282465311592f0fdb5863f823\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:52:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:52:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c8924a3ef645447151d91c506d105c9760f0dce0c568e9bf37ea76108a6352b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c8924a3ef645447151d91c506d105c9760f0dce0c568e9bf37ea76108a6352b2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:52:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f0da2ec606517cc9e0c2ed1d549c947db2eecb603f9bc9ba8bddbee0aad4710\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0f0da2ec606517cc9e0c2ed1d549c947db2eecb603f9bc9ba8bddbee0aad4710\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:52:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:52:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-fqb6r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:29Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:29 crc kubenswrapper[4787]: I0127 07:52:29.242698 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7279787f-4f29-4eae-a213-b7ea5d579b93\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://486c1b541ff6adc6a3cdab348fbdd7fbcf1c202c1a474e5e5db37bbcfb38fe06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://47800c35810b19642ee37e41f20900bef93d843d1d7e2219547dde7bf88cc7dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://85684c09dd546fc9a972aa45da092b22d794a5588140a451becfbe962be82b76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0aa5c186d3ea5f41540e7450b9ee5a5bbfe8359
762ab9aa219a4e4bb26fb0a94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9717409dfdeb0d26d9a1c28a651d1feddfd4aaa11a9bc7db7206fbf8c6400c22\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b0fb183cca1d2f6f6f5bd9c80107e85fe0f5f39ea985a036ba115836e45d7cc2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b0fb183cca1d2f6f6f5bd9c80107e85fe0f5f39ea985a036ba115836e45d7cc2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:51:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a9bb746c11b2a67e014815e16d6765a1c86a3d2c156cae83853881bdb988507c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a9bb746c11b2a67e014815e16d6765a1c86a3d2c156cae83853881bdb988507c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://1fa78137ccc23e32f6eb838abf3991ca3d99b8349721f1419c892a7f8795f55f\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1fa78137ccc23e32f6eb838abf3991ca3d99b8349721f1419c892a7f8795f55f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:51:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:35Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:29Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:29 crc kubenswrapper[4787]: I0127 07:52:29.265941 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:29 crc kubenswrapper[4787]: I0127 07:52:29.265993 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:29 crc kubenswrapper[4787]: I0127 07:52:29.266004 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:29 crc kubenswrapper[4787]: I0127 07:52:29.266023 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:29 crc kubenswrapper[4787]: I0127 07:52:29.266034 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:29Z","lastTransitionTime":"2026-01-27T07:52:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:52:29 crc kubenswrapper[4787]: I0127 07:52:29.269774 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7a18f3e6327858658093c392b8b7de18ec6a1085c08ae31ae2f2dc371555011a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:29Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:29 crc kubenswrapper[4787]: I0127 07:52:29.288431 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:29Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:29 crc kubenswrapper[4787]: I0127 07:52:29.314414 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:29Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:29 crc kubenswrapper[4787]: I0127 07:52:29.333342 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-rqjpz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e6f78168-0b0d-464d-b1c7-00bb9a69c0d1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e73805a73ffd2c2028fa682daaf20e295ecd2f2d7e24bb9b897883f18c3c514e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mo
untPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tz7b2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-rqjpz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:29Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:29 crc kubenswrapper[4787]: I0127 07:52:29.366796 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7642m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fa44405c-042c-485a-ab6c-912dcd377751\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b5f54aa358ddf7d9fe76fd45c4e3332ba2ef1fa4da77c6f38ae29209c4339a80\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2abb29a77d0081b70495701e68f0a5a9b80656eab59ae8de10fab1450a1df49f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://108cdc389c2cbb9baeb538856e70ea1ca2bce5968eb4097700b3e71e5322258d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0bcc1a1d9f6e5bd0d8cf9b36441460debd839f92291531175eb05db5070136e8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fbc588f712cb0167029fc80f924f52841fb904738bbcf774c53ca15a2642ef9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc1a27dd4120a4eafd6bfbd908f9fdbc80e9b006afafb509399f2b657b129b2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://023dbda584b2664829844328d67f417bacaf8e49
34d8ae66ea7b9928e95ac995\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://023dbda584b2664829844328d67f417bacaf8e4934d8ae66ea7b9928e95ac995\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-27T07:52:12Z\\\",\\\"message\\\":\\\"80c50373a5e91f08c5893365bfd5a5040449b1b6585a23 prometheus.io/scheme:https prometheus.io/scrape:true service.alpha.openshift.io/serving-cert-secret-name:serving-cert service.alpha.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168 service.beta.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168] [] [] []},Spec:ServiceSpec{Ports:[]ServicePort{ServicePort{Name:https,Protocol:TCP,Port:443,TargetPort:{0 8443 },NodePort:0,AppProtocol:nil,},},Selector:map[string]string{apiserver: true,},ClusterIP:10.217.4.140,Type:ClusterIP,ExternalIPs:[],SessionAffinity:None,LoadBalancerIP:,LoadBalancerSourceRanges:[],ExternalName:,ExternalTrafficPolicy:,HealthCheckNodePort:0,PublishNotReadyAddresses:false,SessionAffinityConfig:nil,IPFamilyPolicy:*SingleStack,ClusterIPs:[10.217.4.140],IPFamilies:[IPv4],AllocateLoadBalancerNodePorts:nil,LoadBalancerClass:nil,InternalTrafficPolicy:*Cluster,TrafficDistribution:nil,},Status:ServiceStatus{LoadBalancer:LoadBalancerStatus{Ingress:[]LoadBalancerIngress{},},Conditions:[]Condition{},},}\\\\nI0127 07:52:11.779634 6307 services_controller.go:356] Processing sync for service openshift-service-ca-operator/metrics for network=default\\\\nF0127 07:52:11.779647 6307 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network contro\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-27T07:52:10Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-7642m_openshift-ovn-kubernetes(fa44405c-042c-485a-ab6c-912dcd377751)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d46b5c512f7b14d97a53ce0355a7ceb08370d3e91ccef003b40386a9785df413\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://199ad62dee1b7ba9f8af9b2f3fcd77db94e8ba7dbfe2d011610c47358c17126b\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://199ad62dee1b7ba9f8af9b2f3fcd77db94e8ba7dbfe2d011610c47358c17126b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:52:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:59Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-7642m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:29Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:29 crc kubenswrapper[4787]: I0127 07:52:29.370024 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:29 crc kubenswrapper[4787]: I0127 07:52:29.370086 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:29 crc kubenswrapper[4787]: I0127 07:52:29.370108 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:29 crc kubenswrapper[4787]: I0127 07:52:29.370138 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:29 crc kubenswrapper[4787]: I0127 07:52:29.370155 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:29Z","lastTransitionTime":"2026-01-27T07:52:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:52:29 crc kubenswrapper[4787]: I0127 07:52:29.381327 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-vhss5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"643aaef9-e302-436b-943e-940480ef74fc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5cda08199034af06aa5966ebd72a8a553fe0c83acf73bbd42f4f7d6bb121b2e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8q989\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ea42ae75a943bc89609995381bc82366333131d7c7200a3ab758b9de239e3283\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8q989\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:52:12Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-vhss5\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:29Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:29 crc kubenswrapper[4787]: I0127 07:52:29.393382 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-vws75" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3969f21f-ab36-49b4-9a9c-02cf19e65ad0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:13Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:13Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:13Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq2mg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq2mg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:52:13Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-vws75\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:29Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:29 crc 
kubenswrapper[4787]: I0127 07:52:29.410123 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6d28aae4-3359-4d7e-8862-8db4b17a3403\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d820bf9fe1c788b0bb17535226d96cd310fc188148091fa8b4186dd43baa5f75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d407da23fc5babf6cec063fb31861fd8ac0f456a44746a83740c61c1e53a436c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0509b185e88f56d56bc6f8429a620d74855d938f1206d6c9193d8e93fc12ae91\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\
\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec120e647d27b70e3066a00046d12258db3162ab9beef75b4079d546c2533aaf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ec120e647d27b70e3066a00046d12258db3162ab9beef75b4079d546c2533aaf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:51:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:35Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:29Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:29 crc kubenswrapper[4787]: I0127 07:52:29.427189 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2974b5846d4b9e95a3eab849f160c5b33393ff6b1bf1275bc6476ec80807d632\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4426d84c4375c7c659c77049bbd6a720bf004fa5e7780b1e8739a2620c3667f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\"
:\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:29Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:29 crc kubenswrapper[4787]: I0127 07:52:29.441825 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1d5a64cdfd5f8aff93294f92aebc9ccf8fa2a4063e347fce2e35604d27651d9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:29Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:29 crc kubenswrapper[4787]: I0127 07:52:29.453371 4787 
status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-q4fh5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f051e184-acac-47cf-9e04-9df648288715\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cf36b99a3389bdbbca8142539870671f6be61df22503df198f6255937e619950\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zvg7g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8829867d627dfed47b988a93a9ac3e381c0bf59794b65f8e1c1e821838477748\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zvg7g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:59Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-q4fh5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2026-01-27T07:52:29Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:29 crc kubenswrapper[4787]: I0127 07:52:29.472268 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:29 crc kubenswrapper[4787]: I0127 07:52:29.472308 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:29 crc kubenswrapper[4787]: I0127 07:52:29.472318 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:29 crc kubenswrapper[4787]: I0127 07:52:29.472335 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:29 crc kubenswrapper[4787]: I0127 07:52:29.472346 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:29Z","lastTransitionTime":"2026-01-27T07:52:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:29 crc kubenswrapper[4787]: I0127 07:52:29.487263 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/3969f21f-ab36-49b4-9a9c-02cf19e65ad0-metrics-certs\") pod \"network-metrics-daemon-vws75\" (UID: \"3969f21f-ab36-49b4-9a9c-02cf19e65ad0\") " pod="openshift-multus/network-metrics-daemon-vws75" Jan 27 07:52:29 crc kubenswrapper[4787]: E0127 07:52:29.487482 4787 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Jan 27 07:52:29 crc kubenswrapper[4787]: E0127 07:52:29.487617 4787 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/3969f21f-ab36-49b4-9a9c-02cf19e65ad0-metrics-certs podName:3969f21f-ab36-49b4-9a9c-02cf19e65ad0 nodeName:}" failed. No retries permitted until 2026-01-27 07:52:45.487580717 +0000 UTC m=+71.139936359 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/3969f21f-ab36-49b4-9a9c-02cf19e65ad0-metrics-certs") pod "network-metrics-daemon-vws75" (UID: "3969f21f-ab36-49b4-9a9c-02cf19e65ad0") : object "openshift-multus"/"metrics-daemon-secret" not registered Jan 27 07:52:29 crc kubenswrapper[4787]: I0127 07:52:29.502380 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-7642m_fa44405c-042c-485a-ab6c-912dcd377751/ovnkube-controller/1.log" Jan 27 07:52:29 crc kubenswrapper[4787]: I0127 07:52:29.506948 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7642m" event={"ID":"fa44405c-042c-485a-ab6c-912dcd377751","Type":"ContainerStarted","Data":"adb98705453ca5a2191ab57c0255b907d1e1d505962fddc38dd440a1d56d505f"} Jan 27 07:52:29 crc kubenswrapper[4787]: I0127 07:52:29.507699 4787 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-7642m" Jan 27 07:52:29 crc kubenswrapper[4787]: I0127 07:52:29.530579 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7279787f-4f29-4eae-a213-b7ea5d579b93\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://486c1b541ff6adc6a3cdab348fbdd7fbcf1c202c1a474e5e5db37bbcfb38fe06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://47800c35810b19642ee37e41f20900bef93d843d1d7e2219547dde7bf88cc7dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":tr
ue,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://85684c09dd546fc9a972aa45da092b22d794a5588140a451becfbe962be82b76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0aa5c186d3ea5f41540e7450b9ee5a5bbfe8359762ab9aa219a4e4bb26fb0a94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9717409dfdeb0d26d9a1c28a651d1feddfd4aaa11a9bc7db7206fbf8c6400c22\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b0fb183cca1d2f6f6f5bd9c80107e85fe0f5f39ea985a036ba115836e45d7cc2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o:/
/b0fb183cca1d2f6f6f5bd9c80107e85fe0f5f39ea985a036ba115836e45d7cc2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:51:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a9bb746c11b2a67e014815e16d6765a1c86a3d2c156cae83853881bdb988507c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a9bb746c11b2a67e014815e16d6765a1c86a3d2c156cae83853881bdb988507c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://1fa78137ccc23e32f6eb838abf3991ca3d99b8349721f1419c892a7f8795f55f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1fa78137ccc23e32f6eb838abf3991ca3d99b8349721f1419c892a7f8795f55f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:51:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:35Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:29Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:29 crc kubenswrapper[4787]: I0127 07:52:29.546697 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7a18f3e6327858658093c392b8b7de18ec6a1085c08ae31ae2f2dc371555011a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:29Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:29 crc kubenswrapper[4787]: I0127 07:52:29.560664 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:29Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:29 crc kubenswrapper[4787]: I0127 07:52:29.574817 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:29 crc kubenswrapper[4787]: I0127 07:52:29.574889 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:29 crc kubenswrapper[4787]: I0127 07:52:29.574904 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:29 crc kubenswrapper[4787]: I0127 07:52:29.574926 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:29 crc kubenswrapper[4787]: I0127 07:52:29.574964 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:29Z","lastTransitionTime":"2026-01-27T07:52:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:52:29 crc kubenswrapper[4787]: I0127 07:52:29.577295 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:29Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:29 crc kubenswrapper[4787]: I0127 07:52:29.599542 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-rqjpz" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e6f78168-0b0d-464d-b1c7-00bb9a69c0d1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e73805a73ffd2c2028fa682daaf20e295ecd2f2d7e24bb9b897883f18c3c514e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tz7b2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-rqjpz\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:29Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:29 crc kubenswrapper[4787]: I0127 07:52:29.631993 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-fqb6r" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0ff88955-7cd9-4af4-9e0e-79614a1d2994\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://47c65fabdecb2c24a7eeabdff7a89339f711ff64e9cb6516900be57a2753f89b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5e7833b50fe96981aa0ed63aa03b7078def1c809e60092a901a8b7ebaee78ad8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5e7833b50fe96981aa0ed63aa03b7078def1c809e60092a901a8b7ebaee78ad8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:52:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.
io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a0b5a15ba402c52fc6f3cd5d882f597c90ea8d1ab4e836f0e0998a301126f5a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a0b5a15ba402c52fc6f3cd5d882f597c90ea8d1ab4e836f0e0998a301126f5a6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:52:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:52:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://82bb64425ded7611030cccfa7a439b14e545bf83bc48c762df144eddc7d08487\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://82bb64425ded7611030cccfa7a439b14e545bf83bc48c762df144eddc7d08487\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:52:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:52:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9782b2a339d2dba1498f28ed3f7e6c4c2e747a282465311592f0fdb5863f823\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f9782b2a339d2dba1498f28ed3f7e6c4c2e747a282465311592f0fdb5863f823\\\",\\\"exitCode\\\":0,\\\
"finishedAt\\\":\\\"2026-01-27T07:52:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:52:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c8924a3ef645447151d91c506d105c9760f0dce0c568e9bf37ea76108a6352b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c8924a3ef645447151d91c506d105c9760f0dce0c568e9bf37ea76108a6352b2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:52:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f0da2ec606517cc9e0c2ed1d549c947db2eecb603f9bc9ba8bddbee0aad4710\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0f0da2ec606517cc9e0c2ed1d549c947db2eecb603f9bc9ba8bddbee0aad4710\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:52:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:52:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-fqb6r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has 
expired or is not yet valid: current time 2026-01-27T07:52:29Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:29 crc kubenswrapper[4787]: I0127 07:52:29.679059 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:29 crc kubenswrapper[4787]: I0127 07:52:29.679104 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:29 crc kubenswrapper[4787]: I0127 07:52:29.679116 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:29 crc kubenswrapper[4787]: I0127 07:52:29.679135 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:29 crc kubenswrapper[4787]: I0127 07:52:29.679147 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:29Z","lastTransitionTime":"2026-01-27T07:52:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:29 crc kubenswrapper[4787]: I0127 07:52:29.682482 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7642m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fa44405c-042c-485a-ab6c-912dcd377751\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b5f54aa358ddf7d9fe76fd45c4e3332ba2ef1fa4da77c6f38ae29209c4339a80\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2abb29a77d0081b70495701e68f0a5a9b80656eab59ae8de10fab1450a1df49f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://108cdc389c2cbb9baeb538856e70ea1ca2bce5968eb4097700b3e71e5322258d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0bcc1a1d9f6e5bd0d8cf9b36441460debd839f92291531175eb05db5070136e8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fbc588f712cb0167029fc80f924f52841fb904738bbcf774c53ca15a2642ef9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc1a27dd4120a4eafd6bfbd908f9fdbc80e9b006afafb509399f2b657b129b2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://adb98705453ca5a2191ab57c0255b907d1e1d505
962fddc38dd440a1d56d505f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://023dbda584b2664829844328d67f417bacaf8e4934d8ae66ea7b9928e95ac995\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-27T07:52:12Z\\\",\\\"message\\\":\\\"80c50373a5e91f08c5893365bfd5a5040449b1b6585a23 prometheus.io/scheme:https prometheus.io/scrape:true service.alpha.openshift.io/serving-cert-secret-name:serving-cert service.alpha.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168 service.beta.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168] [] [] []},Spec:ServiceSpec{Ports:[]ServicePort{ServicePort{Name:https,Protocol:TCP,Port:443,TargetPort:{0 8443 },NodePort:0,AppProtocol:nil,},},Selector:map[string]string{apiserver: true,},ClusterIP:10.217.4.140,Type:ClusterIP,ExternalIPs:[],SessionAffinity:None,LoadBalancerIP:,LoadBalancerSourceRanges:[],ExternalName:,ExternalTrafficPolicy:,HealthCheckNodePort:0,PublishNotReadyAddresses:false,SessionAffinityConfig:nil,IPFamilyPolicy:*SingleStack,ClusterIPs:[10.217.4.140],IPFamilies:[IPv4],AllocateLoadBalancerNodePorts:nil,LoadBalancerClass:nil,InternalTrafficPolicy:*Cluster,TrafficDistribution:nil,},Status:ServiceStatus{LoadBalancer:LoadBalancerStatus{Ingress:[]LoadBalancerIngress{},},Conditions:[]Condition{},},}\\\\nI0127 07:52:11.779634 6307 services_controller.go:356] Processing sync for service openshift-service-ca-operator/metrics for network=default\\\\nF0127 07:52:11.779647 6307 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network 
contro\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-27T07:52:10Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d46b5c512f7b14d97a53ce0355a7ceb08370d3e91ccef003b40386a9785df413\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\
"containerID\\\":\\\"cri-o://199ad62dee1b7ba9f8af9b2f3fcd77db94e8ba7dbfe2d011610c47358c17126b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://199ad62dee1b7ba9f8af9b2f3fcd77db94e8ba7dbfe2d011610c47358c17126b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:52:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:59Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-7642m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:29Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:29 crc kubenswrapper[4787]: I0127 07:52:29.699615 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-vhss5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"643aaef9-e302-436b-943e-940480ef74fc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5cda08199034af06aa5966ebd72a8a553fe0c83acf73bbd42f4f7d6bb121b2e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8q989\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ea42ae75a943bc89609995381bc82366333131d7c7200a3ab758b9de239e3283\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8q989\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:52:12Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-vhss5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:29Z is after 2025-08-24T17:21:41Z" Jan 27 
07:52:29 crc kubenswrapper[4787]: I0127 07:52:29.713937 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-vws75" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3969f21f-ab36-49b4-9a9c-02cf19e65ad0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:13Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:13Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:13Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq2mg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq2mg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:52:13Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-vws75\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:29Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:29 crc kubenswrapper[4787]: I0127 07:52:29.737928 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6d28aae4-3359-4d7e-8862-8db4b17a3403\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d820bf9fe1c788b0bb17535226d96cd310fc188148091fa8b4186dd43baa5f75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d407da23fc5babf6cec063fb31861fd8ac0f456a44746a83740c61c1e53a436c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0509b185e88f56d56bc6f8429a620d74855d938f1206d6c9193d8e93fc12ae91\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec120e647d27b70e3066a00046d12258db3162ab9beef75b4079d546c2533aaf\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ec120e647d27b70e3066a00046d12258db3162ab9beef75b4079d546c2533aaf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:51:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:35Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:29Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:29 crc kubenswrapper[4787]: I0127 07:52:29.755834 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2974b5846d4b9e95a3eab849f160c5b33393ff6b1bf1275bc6476ec80807d632\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4426d84c4375c7c659c77049bbd6a720bf004fa5e7780b1e8739a2620c3667f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":t
rue,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:29Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:29 crc kubenswrapper[4787]: I0127 07:52:29.770871 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1d5a64cdfd5f8aff93294f92aebc9ccf8fa2a4063e347fce2e35604d27651d9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:29Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:29 crc kubenswrapper[4787]: I0127 07:52:29.782188 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:29 crc kubenswrapper[4787]: I0127 07:52:29.782259 4787 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:29 crc kubenswrapper[4787]: I0127 07:52:29.782274 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:29 crc kubenswrapper[4787]: I0127 07:52:29.782296 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:29 crc kubenswrapper[4787]: I0127 07:52:29.782313 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:29Z","lastTransitionTime":"2026-01-27T07:52:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:29 crc kubenswrapper[4787]: I0127 07:52:29.785959 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-q4fh5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f051e184-acac-47cf-9e04-9df648288715\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cf36b99a3389bdbbca8142539870671f6be61df22503df198f6255937e619950\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zvg7g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8829867d627dfed47b988a93a9ac3e381c0bf59794b65f8e1c1e821838477748\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\
\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zvg7g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:59Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-q4fh5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:29Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:29 crc kubenswrapper[4787]: I0127 07:52:29.801826 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9bef4a57-904d-47b1-baa1-224c5c9f2b1a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://99c0fb5499a4bed619d003da60f83076e413fc5bda47a12d42770f567deeeec8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c114c2274358e777b240765cca81ea54a2e334002317a86595b7bfec95a11fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volume
Mounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c470d3edd878f69c48f1457780557dd56300c0fe69342c479f1922dca856bceb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bba5d04b33d62090867842211184f53bc23cf78b90a4c6709792639b74d1cf0d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:35Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:29Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:29 crc kubenswrapper[4787]: I0127 07:52:29.818222 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"60eef58e-b2eb-43d9-a499-317083a89ca3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://35e90c0899d9ae9ea7cce3f5904955f1f3a80e147efbd766e9ed3b34014f40df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://53dd18ffb0f39abccd8edc121977448f4c491f6be9acee866dde8cbeb7c52ab5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://af5452f3980a8a4614721cb60fffffb3b1f3f5d8d82928249857ed2dd3fb5986\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://215bbc991c576293e40d9ba239e697bfad702383af70483ddf0acea311b4ae28\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://879d855210b326ad3cbb964024c0b48b7373519deae8b974b9f5864416c15ef3\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"message\\\":\\\"le observer\\\\nW0127 07:51:54.984485 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0127 07:51:54.984680 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0127 07:51:54.985504 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2155430209/tls.crt::/tmp/serving-cert-2155430209/tls.key\\\\\\\"\\\\nI0127 07:51:55.207010 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0127 07:51:55.214027 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0127 07:51:55.214056 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0127 07:51:55.214085 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0127 07:51:55.214092 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0127 07:51:55.221404 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0127 07:51:55.221608 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0127 07:51:55.221697 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0127 07:51:55.221768 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0127 07:51:55.221825 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0127 07:51:55.221877 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0127 07:51:55.221926 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0127 07:51:55.221414 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0127 07:51:55.222961 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-27T07:51:49Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f3424e1f77a68fac6a8ccd12e018ce28850817c99e72cef7edbff0941124c46b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c37afd8d2562157729aca3685d7abae2bb6b9e081c2b456706dde875fd762c07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c37afd8d2562157729aca3685d7abae2bb6b9e081c2b456706dde875fd762c07\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:51:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:35Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:29Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:29 crc kubenswrapper[4787]: I0127 07:52:29.832932 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:29Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:29 crc kubenswrapper[4787]: I0127 07:52:29.846498 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-fghc7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6cac1bb0-6b6f-442f-9db4-a25d4f194bb1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://825d098d784c9e43aa1c5b9014dd290f1a23ddf0651dfd603c634682a8f05da2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c8zvj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:59Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-fghc7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:29Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:29 crc kubenswrapper[4787]: I0127 07:52:29.867544 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-rqwfc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fe88a860-6bb6-40ef-8ed7-c16f06fad8d3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f0dc7c2423b05f4eb857a5a6d8a5006cea055965d20a85e84659b369f9659eeb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-scmbf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:52:02Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-rqwfc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:29Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:29 crc kubenswrapper[4787]: I0127 07:52:29.884540 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:29 crc kubenswrapper[4787]: I0127 07:52:29.884615 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:29 crc kubenswrapper[4787]: I0127 07:52:29.884626 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:29 crc kubenswrapper[4787]: I0127 07:52:29.884645 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:29 crc kubenswrapper[4787]: I0127 07:52:29.884659 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:29Z","lastTransitionTime":"2026-01-27T07:52:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:29 crc kubenswrapper[4787]: I0127 07:52:29.987715 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:29 crc kubenswrapper[4787]: I0127 07:52:29.987786 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:29 crc kubenswrapper[4787]: I0127 07:52:29.987804 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:29 crc kubenswrapper[4787]: I0127 07:52:29.987825 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:29 crc kubenswrapper[4787]: I0127 07:52:29.987841 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:29Z","lastTransitionTime":"2026-01-27T07:52:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:30 crc kubenswrapper[4787]: I0127 07:52:30.060883 4787 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-29 17:13:45.013565453 +0000 UTC Jan 27 07:52:30 crc kubenswrapper[4787]: I0127 07:52:30.091803 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:30 crc kubenswrapper[4787]: I0127 07:52:30.091886 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:30 crc kubenswrapper[4787]: I0127 07:52:30.091905 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:30 crc kubenswrapper[4787]: I0127 07:52:30.091938 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:30 crc kubenswrapper[4787]: I0127 07:52:30.091959 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:30Z","lastTransitionTime":"2026-01-27T07:52:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:52:30 crc kubenswrapper[4787]: I0127 07:52:30.194197 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:30 crc kubenswrapper[4787]: I0127 07:52:30.194240 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:30 crc kubenswrapper[4787]: I0127 07:52:30.194248 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:30 crc kubenswrapper[4787]: I0127 07:52:30.194266 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:30 crc kubenswrapper[4787]: I0127 07:52:30.194276 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:30Z","lastTransitionTime":"2026-01-27T07:52:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:30 crc kubenswrapper[4787]: I0127 07:52:30.296441 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:30 crc kubenswrapper[4787]: I0127 07:52:30.296481 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:30 crc kubenswrapper[4787]: I0127 07:52:30.296499 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:30 crc kubenswrapper[4787]: I0127 07:52:30.296519 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:30 crc kubenswrapper[4787]: I0127 07:52:30.296532 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:30Z","lastTransitionTime":"2026-01-27T07:52:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:30 crc kubenswrapper[4787]: I0127 07:52:30.399315 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:30 crc kubenswrapper[4787]: I0127 07:52:30.399391 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:30 crc kubenswrapper[4787]: I0127 07:52:30.399414 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:30 crc kubenswrapper[4787]: I0127 07:52:30.399448 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:30 crc kubenswrapper[4787]: I0127 07:52:30.399470 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:30Z","lastTransitionTime":"2026-01-27T07:52:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:52:30 crc kubenswrapper[4787]: I0127 07:52:30.506799 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:30 crc kubenswrapper[4787]: I0127 07:52:30.506857 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:30 crc kubenswrapper[4787]: I0127 07:52:30.506868 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:30 crc kubenswrapper[4787]: I0127 07:52:30.506888 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:30 crc kubenswrapper[4787]: I0127 07:52:30.506903 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:30Z","lastTransitionTime":"2026-01-27T07:52:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:30 crc kubenswrapper[4787]: I0127 07:52:30.512810 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-7642m_fa44405c-042c-485a-ab6c-912dcd377751/ovnkube-controller/2.log" Jan 27 07:52:30 crc kubenswrapper[4787]: I0127 07:52:30.513455 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-7642m_fa44405c-042c-485a-ab6c-912dcd377751/ovnkube-controller/1.log" Jan 27 07:52:30 crc kubenswrapper[4787]: I0127 07:52:30.516619 4787 generic.go:334] "Generic (PLEG): container finished" podID="fa44405c-042c-485a-ab6c-912dcd377751" containerID="adb98705453ca5a2191ab57c0255b907d1e1d505962fddc38dd440a1d56d505f" exitCode=1 Jan 27 07:52:30 crc kubenswrapper[4787]: I0127 07:52:30.516665 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7642m" event={"ID":"fa44405c-042c-485a-ab6c-912dcd377751","Type":"ContainerDied","Data":"adb98705453ca5a2191ab57c0255b907d1e1d505962fddc38dd440a1d56d505f"} Jan 27 07:52:30 crc kubenswrapper[4787]: I0127 07:52:30.516722 4787 scope.go:117] "RemoveContainer" containerID="023dbda584b2664829844328d67f417bacaf8e4934d8ae66ea7b9928e95ac995" Jan 27 07:52:30 crc kubenswrapper[4787]: I0127 07:52:30.517397 4787 scope.go:117] "RemoveContainer" containerID="adb98705453ca5a2191ab57c0255b907d1e1d505962fddc38dd440a1d56d505f" Jan 27 07:52:30 crc kubenswrapper[4787]: E0127 07:52:30.517600 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-7642m_openshift-ovn-kubernetes(fa44405c-042c-485a-ab6c-912dcd377751)\"" pod="openshift-ovn-kubernetes/ovnkube-node-7642m" podUID="fa44405c-042c-485a-ab6c-912dcd377751" Jan 27 07:52:30 crc kubenswrapper[4787]: I0127 07:52:30.533618 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6d28aae4-3359-4d7e-8862-8db4b17a3403\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d820bf9fe1c788b0bb17535226d96cd310fc188148091fa8b4186dd43baa5f75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d407da23fc5babf6cec063fb31861fd8ac0f456a44746a83740c61c1e53a436c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0509b185e88f56d56bc6f8429a620d74855d938f1206d6c9193d8e93fc12ae91\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec120e647d27b70e3066a00046d12258db3162ab9beef75b4079d546c2533aaf\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ec120e647d27b70e3066a00046d12258db3162ab9beef75b4079d546c2533aaf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:51:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:35Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:30Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:30 crc kubenswrapper[4787]: I0127 07:52:30.555463 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2974b5846d4b9e95a3eab849f160c5b33393ff6b1bf1275bc6476ec80807d632\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4426d84c4375c7c659c77049bbd6a720bf004fa5e7780b1e8739a2620c3667f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":t
rue,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:30Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:30 crc kubenswrapper[4787]: I0127 07:52:30.569974 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1d5a64cdfd5f8aff93294f92aebc9ccf8fa2a4063e347fce2e35604d27651d9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:30Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:30 crc kubenswrapper[4787]: I0127 07:52:30.584234 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-q4fh5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f051e184-acac-47cf-9e04-9df648288715\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cf36b99a3389bdbbca8142539870671f6be61df22503df198f6255937e619950\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zvg7g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8829867d627dfed47b988a93a9ac3e381c0bf59794b65f8e1c1e821838477748\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zvg7g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:59Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-q4fh5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:30Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:30 crc kubenswrapper[4787]: I0127 07:52:30.599170 4787 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9bef4a57-904d-47b1-baa1-224c5c9f2b1a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://99c0fb5499a4bed619d003da60f83076e413fc5bda47a12d42770f567deeeec8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c114c2274358e777b240765cca81ea54a2e334002317a86595b7bfec95a11fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c470d3edd878f69c48f1457780557dd56300c0fe69342c479f1922dca856bceb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bba5d04b33d6209086784221118
4f53bc23cf78b90a4c6709792639b74d1cf0d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:35Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:30Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:30 crc kubenswrapper[4787]: I0127 07:52:30.610095 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:30 crc kubenswrapper[4787]: I0127 07:52:30.610148 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:30 crc kubenswrapper[4787]: I0127 07:52:30.610160 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:30 crc kubenswrapper[4787]: I0127 07:52:30.610177 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:30 crc kubenswrapper[4787]: I0127 07:52:30.610189 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:30Z","lastTransitionTime":"2026-01-27T07:52:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:52:30 crc kubenswrapper[4787]: I0127 07:52:30.613699 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"60eef58e-b2eb-43d9-a499-317083a89ca3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://35e90c0899d9ae9ea7cce3f5904955f1f3a80e147efbd766e9ed3b34014f40df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://53dd18ffb0f39abccd8edc121977448f4c491f6be9acee866dde8cbeb7c52ab5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://af5452f3980a8a4614721cb60fffffb3b1f3f5d8d82928249857ed2dd3fb5986\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/ku
bernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://215bbc991c576293e40d9ba239e697bfad702383af70483ddf0acea311b4ae28\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://879d855210b326ad3cbb964024c0b48b7373519deae8b974b9f5864416c15ef3\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"message\\\":\\\"le observer\\\\nW0127 07:51:54.984485 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0127 07:51:54.984680 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0127 07:51:54.985504 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2155430209/tls.crt::/tmp/serving-cert-2155430209/tls.key\\\\\\\"\\\\nI0127 07:51:55.207010 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0127 07:51:55.214027 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0127 07:51:55.214056 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0127 07:51:55.214085 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0127 07:51:55.214092 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0127 07:51:55.221404 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0127 07:51:55.221608 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0127 07:51:55.221697 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0127 07:51:55.221768 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0127 07:51:55.221825 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0127 07:51:55.221877 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0127 07:51:55.221926 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0127 07:51:55.221414 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0127 07:51:55.222961 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-27T07:51:49Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f3424e1f77a68fac6a8ccd12e018ce28850817c99e72cef7edbff0941124c46b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c37afd8d2562157729aca3685d7abae2bb6b9e081c2b456706dde875fd762c07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c37afd8d2562157729aca3685d7abae2bb6b9e081c2b456706dde875fd762c07\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:51:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:35Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:30Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:30 crc kubenswrapper[4787]: I0127 07:52:30.625807 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:30Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:30 crc kubenswrapper[4787]: I0127 07:52:30.635596 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-fghc7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6cac1bb0-6b6f-442f-9db4-a25d4f194bb1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://825d098d784c9e43aa1c5b9014dd290f1a23ddf0651dfd603c634682a8f05da2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c8zvj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:59Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-fghc7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:30Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:30 crc kubenswrapper[4787]: I0127 07:52:30.645316 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-rqwfc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fe88a860-6bb6-40ef-8ed7-c16f06fad8d3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f0dc7c2423b05f4eb857a5a6d8a5006cea055965d20a85e84659b369f9659eeb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-scmbf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:52:02Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-rqwfc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:30Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:30 crc kubenswrapper[4787]: I0127 07:52:30.659955 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-fqb6r" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0ff88955-7cd9-4af4-9e0e-79614a1d2994\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://47c65fabdecb2c24a7eeabdff7a89339f711ff64e9cb6516900be57a2753f89b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5e7833b50fe96981aa0ed63aa03b7078def1c809e60092a901a8b7ebaee78ad8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5e7833b50fe96981aa0ed63aa03b7078def1c809e60092a901a8b7ebaee78ad8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:52:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a0b5a15ba402c52fc6f3cd5d882f597c90ea8d1ab4e836f0e0998a301126f5a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a0b5a15ba402c52fc6f3cd5d882f597c90ea8d1ab4e836f0e0998a301126f5a6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:52:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:52:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://82bb64425ded7611030cccfa7a439b14e545bf83bc48c762df144eddc7d08487\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://82bb64425ded7611030cccfa7a439b14e545bf83bc48c762df144eddc7d08487\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:52:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:52:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9782b2a339d2dba1498f28ed3f7e6c4c2e747a282465311592f0fdb5863f823\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f9782b2a339d2dba1498f28ed3f7e6c4c2e747a282465311592f0fdb5863f823\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:52:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:52:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c8924a3ef645447151d91c506d105c9760f0dce0c568e9bf37ea76108a6352b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c8924a3ef645447151d91c506d105c9760f0dce0c568e9bf37ea76108a6352b2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:52:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f0da2ec606517cc9e0c2ed1d549c947db2eecb603f9bc9ba8bddbee0aad4710\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0f0da2ec606517cc9e0c2ed1d549c947db2eecb603f9bc9ba8bddbee0aad4710\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:52:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:52:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-fqb6r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:30Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:30 crc kubenswrapper[4787]: I0127 07:52:30.678276 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7279787f-4f29-4eae-a213-b7ea5d579b93\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://486c1b541ff6adc6a3cdab348fbdd7fbcf1c202c1a474e5e5db37bbcfb38fe06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://47800c35810b19642ee37e41f20900bef93d843d1d7e2219547dde7bf88cc7dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://85684c09dd546fc9a972aa45da092b22d794a5588140a451becfbe962be82b76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0aa5c186d3ea5f41540e7450b9ee5a5bbfe8359
762ab9aa219a4e4bb26fb0a94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9717409dfdeb0d26d9a1c28a651d1feddfd4aaa11a9bc7db7206fbf8c6400c22\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b0fb183cca1d2f6f6f5bd9c80107e85fe0f5f39ea985a036ba115836e45d7cc2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b0fb183cca1d2f6f6f5bd9c80107e85fe0f5f39ea985a036ba115836e45d7cc2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:51:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a9bb746c11b2a67e014815e16d6765a1c86a3d2c156cae83853881bdb988507c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a9bb746c11b2a67e014815e16d6765a1c86a3d2c156cae83853881bdb988507c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://1fa78137ccc23e32f6eb838abf3991ca3d99b8349721f1419c892a7f8795f55f\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1fa78137ccc23e32f6eb838abf3991ca3d99b8349721f1419c892a7f8795f55f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:51:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:35Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:30Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:30 crc kubenswrapper[4787]: I0127 07:52:30.691391 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7a18f3e6327858658093c392b8b7de18ec6a1085c08ae31ae2f2dc371555011a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:30Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:30 crc kubenswrapper[4787]: I0127 07:52:30.709188 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:30Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:30 crc kubenswrapper[4787]: I0127 07:52:30.712287 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:30 crc kubenswrapper[4787]: I0127 07:52:30.712336 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:30 crc kubenswrapper[4787]: I0127 07:52:30.712346 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:30 crc kubenswrapper[4787]: I0127 07:52:30.712366 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:30 crc kubenswrapper[4787]: I0127 07:52:30.712379 4787 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:30Z","lastTransitionTime":"2026-01-27T07:52:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:30 crc kubenswrapper[4787]: I0127 07:52:30.725440 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:30Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:30 crc kubenswrapper[4787]: I0127 07:52:30.745766 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-rqjpz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e6f78168-0b0d-464d-b1c7-00bb9a69c0d1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e73805a73ffd2c2028fa682daaf20e295ecd2f2d7e24bb9b897883f18c3c514e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mo
untPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tz7b2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-rqjpz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:30Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:30 crc kubenswrapper[4787]: I0127 07:52:30.768661 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7642m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fa44405c-042c-485a-ab6c-912dcd377751\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b5f54aa358ddf7d9fe76fd45c4e3332ba2ef1fa4da77c6f38ae29209c4339a80\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2abb29a77d0081b70495701e68f0a5a9b80656eab59ae8de10fab1450a1df49f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://108cdc389c2cbb9baeb538856e70ea1ca2bce5968eb4097700b3e71e5322258d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0bcc1a1d9f6e5bd0d8cf9b36441460debd839f92291531175eb05db5070136e8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fbc588f712cb0167029fc80f924f52841fb904738bbcf774c53ca15a2642ef9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc1a27dd4120a4eafd6bfbd908f9fdbc80e9b006afafb509399f2b657b129b2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://adb98705453ca5a2191ab57c0255b907d1e1d505
962fddc38dd440a1d56d505f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://023dbda584b2664829844328d67f417bacaf8e4934d8ae66ea7b9928e95ac995\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-27T07:52:12Z\\\",\\\"message\\\":\\\"80c50373a5e91f08c5893365bfd5a5040449b1b6585a23 prometheus.io/scheme:https prometheus.io/scrape:true service.alpha.openshift.io/serving-cert-secret-name:serving-cert service.alpha.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168 service.beta.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168] [] [] []},Spec:ServiceSpec{Ports:[]ServicePort{ServicePort{Name:https,Protocol:TCP,Port:443,TargetPort:{0 8443 },NodePort:0,AppProtocol:nil,},},Selector:map[string]string{apiserver: true,},ClusterIP:10.217.4.140,Type:ClusterIP,ExternalIPs:[],SessionAffinity:None,LoadBalancerIP:,LoadBalancerSourceRanges:[],ExternalName:,ExternalTrafficPolicy:,HealthCheckNodePort:0,PublishNotReadyAddresses:false,SessionAffinityConfig:nil,IPFamilyPolicy:*SingleStack,ClusterIPs:[10.217.4.140],IPFamilies:[IPv4],AllocateLoadBalancerNodePorts:nil,LoadBalancerClass:nil,InternalTrafficPolicy:*Cluster,TrafficDistribution:nil,},Status:ServiceStatus{LoadBalancer:LoadBalancerStatus{Ingress:[]LoadBalancerIngress{},},Conditions:[]Condition{},},}\\\\nI0127 07:52:11.779634 6307 services_controller.go:356] Processing sync for service openshift-service-ca-operator/metrics for network=default\\\\nF0127 07:52:11.779647 6307 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network contro\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-27T07:52:10Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://adb98705453ca5a2191ab57c0255b907d1e1d505962fddc38dd440a1d56d505f\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-27T07:52:30Z\\\",\\\"message\\\":\\\" 6533 obj_retry.go:365] Adding new object: *v1.Pod openshift-network-node-identity/network-node-identity-vrzqb\\\\nF0127 07:52:30.089457 6533 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:30Z is after 2025-08-24T17:21:41Z]\\\\nI0127 07:52:30.089470 6533 obj_retry.go:303] Retry object setup: *v1.Pod openshift-image-registry/node-ca-rqwfc\\\\nI0127 07:52:30.089462 6533 ovn.go:134] Ensuring zone local for Pod openshift-network-node-identity/network-node-identity-vrzqb 
in node crc\\\\nI0127 07:52:30.089487 6533 obj_retry.go:386] Retry successful for *v1.Pod openshift-network-node-identity/network-node-identity-vrzqb after 0 fai\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-27T07:52:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d46b5c512f7b14d97a53ce0355a7ceb08370d3e91ccef003b40386a9785df413\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1
99ad62dee1b7ba9f8af9b2f3fcd77db94e8ba7dbfe2d011610c47358c17126b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://199ad62dee1b7ba9f8af9b2f3fcd77db94e8ba7dbfe2d011610c47358c17126b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:52:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:59Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-7642m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:30Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:30 crc kubenswrapper[4787]: I0127 07:52:30.788845 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-vhss5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"643aaef9-e302-436b-943e-940480ef74fc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5cda08199034af06aa5966ebd72a8a553fe0c83acf73bbd42f4f7d6bb121b2e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8q989\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ea42ae75a943bc89609995381bc82366333131d7c7200a3ab758b9de239e3283\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8q989\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:52:12Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-vhss5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:30Z is after 2025-08-24T17:21:41Z" Jan 27 
07:52:30 crc kubenswrapper[4787]: I0127 07:52:30.804012 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-vws75" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3969f21f-ab36-49b4-9a9c-02cf19e65ad0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:13Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:13Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:13Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq2mg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq2mg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:52:13Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-vws75\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:30Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:30 crc kubenswrapper[4787]: I0127 07:52:30.815432 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:30 crc kubenswrapper[4787]: I0127 07:52:30.815486 4787 kubelet_node_status.go:724] "Recording event message 
for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:30 crc kubenswrapper[4787]: I0127 07:52:30.815498 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:30 crc kubenswrapper[4787]: I0127 07:52:30.815517 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:30 crc kubenswrapper[4787]: I0127 07:52:30.815531 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:30Z","lastTransitionTime":"2026-01-27T07:52:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:30 crc kubenswrapper[4787]: I0127 07:52:30.917703 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:30 crc kubenswrapper[4787]: I0127 07:52:30.917761 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:30 crc kubenswrapper[4787]: I0127 07:52:30.917776 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:30 crc kubenswrapper[4787]: I0127 07:52:30.917797 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:30 crc kubenswrapper[4787]: I0127 07:52:30.917810 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:30Z","lastTransitionTime":"2026-01-27T07:52:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:31 crc kubenswrapper[4787]: I0127 07:52:31.021063 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:31 crc kubenswrapper[4787]: I0127 07:52:31.021105 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:31 crc kubenswrapper[4787]: I0127 07:52:31.021115 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:31 crc kubenswrapper[4787]: I0127 07:52:31.021132 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:31 crc kubenswrapper[4787]: I0127 07:52:31.021142 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:31Z","lastTransitionTime":"2026-01-27T07:52:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:52:31 crc kubenswrapper[4787]: I0127 07:52:31.061856 4787 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-25 09:15:51.867177862 +0000 UTC Jan 27 07:52:31 crc kubenswrapper[4787]: I0127 07:52:31.076409 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 27 07:52:31 crc kubenswrapper[4787]: I0127 07:52:31.076514 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 27 07:52:31 crc kubenswrapper[4787]: E0127 07:52:31.076573 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 27 07:52:31 crc kubenswrapper[4787]: E0127 07:52:31.076736 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 27 07:52:31 crc kubenswrapper[4787]: I0127 07:52:31.076787 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 27 07:52:31 crc kubenswrapper[4787]: E0127 07:52:31.076888 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 27 07:52:31 crc kubenswrapper[4787]: I0127 07:52:31.077650 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-vws75" Jan 27 07:52:31 crc kubenswrapper[4787]: E0127 07:52:31.078140 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-vws75" podUID="3969f21f-ab36-49b4-9a9c-02cf19e65ad0" Jan 27 07:52:31 crc kubenswrapper[4787]: I0127 07:52:31.123969 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:31 crc kubenswrapper[4787]: I0127 07:52:31.124018 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:31 crc kubenswrapper[4787]: I0127 07:52:31.124034 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:31 crc kubenswrapper[4787]: I0127 07:52:31.124064 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:31 crc kubenswrapper[4787]: I0127 07:52:31.124081 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:31Z","lastTransitionTime":"2026-01-27T07:52:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:31 crc kubenswrapper[4787]: I0127 07:52:31.226510 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:31 crc kubenswrapper[4787]: I0127 07:52:31.226588 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:31 crc kubenswrapper[4787]: I0127 07:52:31.226605 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:31 crc kubenswrapper[4787]: I0127 07:52:31.226630 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:31 crc kubenswrapper[4787]: I0127 07:52:31.226647 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:31Z","lastTransitionTime":"2026-01-27T07:52:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:52:31 crc kubenswrapper[4787]: I0127 07:52:31.329188 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:31 crc kubenswrapper[4787]: I0127 07:52:31.329236 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:31 crc kubenswrapper[4787]: I0127 07:52:31.329255 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:31 crc kubenswrapper[4787]: I0127 07:52:31.329274 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:31 crc kubenswrapper[4787]: I0127 07:52:31.329284 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:31Z","lastTransitionTime":"2026-01-27T07:52:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:31 crc kubenswrapper[4787]: I0127 07:52:31.431376 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:31 crc kubenswrapper[4787]: I0127 07:52:31.431431 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:31 crc kubenswrapper[4787]: I0127 07:52:31.431454 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:31 crc kubenswrapper[4787]: I0127 07:52:31.431473 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:31 crc kubenswrapper[4787]: I0127 07:52:31.431487 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:31Z","lastTransitionTime":"2026-01-27T07:52:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:52:31 crc kubenswrapper[4787]: I0127 07:52:31.523686 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-7642m_fa44405c-042c-485a-ab6c-912dcd377751/ovnkube-controller/2.log" Jan 27 07:52:31 crc kubenswrapper[4787]: I0127 07:52:31.528206 4787 scope.go:117] "RemoveContainer" containerID="adb98705453ca5a2191ab57c0255b907d1e1d505962fddc38dd440a1d56d505f" Jan 27 07:52:31 crc kubenswrapper[4787]: E0127 07:52:31.528403 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-7642m_openshift-ovn-kubernetes(fa44405c-042c-485a-ab6c-912dcd377751)\"" pod="openshift-ovn-kubernetes/ovnkube-node-7642m" podUID="fa44405c-042c-485a-ab6c-912dcd377751" Jan 27 07:52:31 crc kubenswrapper[4787]: I0127 07:52:31.534074 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:31 crc kubenswrapper[4787]: I0127 07:52:31.534126 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:31 crc kubenswrapper[4787]: I0127 07:52:31.534143 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:31 crc kubenswrapper[4787]: I0127 07:52:31.534165 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:31 crc kubenswrapper[4787]: I0127 07:52:31.534179 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:31Z","lastTransitionTime":"2026-01-27T07:52:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:52:31 crc kubenswrapper[4787]: I0127 07:52:31.545338 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6d28aae4-3359-4d7e-8862-8db4b17a3403\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d820bf9fe1c788b0bb17535226d96cd310fc188148091fa8b4186dd43baa5f75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d407da23fc5babf6cec063fb31861fd8ac0f456a44746a83740c61c1e53a436c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0509b185e88f56d56bc6f8429a620d74855d938f1206d6c9193d8e93fc12ae91\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"
cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec120e647d27b70e3066a00046d12258db3162ab9beef75b4079d546c2533aaf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ec120e647d27b70e3066a00046d12258db3162ab9beef75b4079d546c2533aaf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:51:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:35Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:31Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:31 crc kubenswrapper[4787]: I0127 07:52:31.563035 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2974b5846d4b9e95a3eab849f160c5b33393ff6b1bf1275bc6476ec80807d632\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4426d84c4375c7c659c77049bbd6a720bf004fa5e7780b1e8739a2620c3667f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919
d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:31Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:31 crc kubenswrapper[4787]: I0127 07:52:31.585772 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1d5a64cdfd5f8aff93294f92aebc9ccf8fa2a4063e347fce2e35604d27651d9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:31Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:31 crc 
kubenswrapper[4787]: I0127 07:52:31.600625 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-q4fh5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f051e184-acac-47cf-9e04-9df648288715\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cf36b99a3389bdbbca8142539870671f6be61df22503df198f6255937e619950\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zvg7g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8829867d627dfed47b988a93a9ac3e381c0bf59794b65f8e1c1e821838477748\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zvg7g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:59Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-q4fh5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate 
has expired or is not yet valid: current time 2026-01-27T07:52:31Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:31 crc kubenswrapper[4787]: I0127 07:52:31.615676 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9bef4a57-904d-47b1-baa1-224c5c9f2b1a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://99c0fb5499a4bed619d003da60f83076e413fc5bda47a12d42770f567deeeec8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c114c2274358e777b240765cca81ea54a2e334002317a86595b7bfec95a11fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c470d3edd878f69c48f1457780557dd56300c0fe69342c479f1922dca856bceb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-re
sources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bba5d04b33d62090867842211184f53bc23cf78b90a4c6709792639b74d1cf0d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:35Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:31Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:31 crc kubenswrapper[4787]: I0127 07:52:31.633670 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"60eef58e-b2eb-43d9-a499-317083a89ca3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://35e90c0899d9ae9ea7cce3f5904955f1f3a80e147efbd766e9ed3b34014f40df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://53dd18ffb0f39abccd8edc121977448f4c491f6be9acee866dde8cbeb7c52ab5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://af5452f3980a8a4614721cb60fffffb3b1f3f5d8d82928249857ed2dd3fb5986\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://215bbc991c576293e40d9ba239e697bfad702383af70483ddf0acea311b4ae28\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://879d855210b326ad3cbb964024c0b48b7373519deae8b974b9f5864416c15ef3\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"message\\\":\\\"le observer\\\\nW0127 07:51:54.984485 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0127 07:51:54.984680 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0127 07:51:54.985504 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2155430209/tls.crt::/tmp/serving-cert-2155430209/tls.key\\\\\\\"\\\\nI0127 07:51:55.207010 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0127 07:51:55.214027 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0127 07:51:55.214056 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0127 07:51:55.214085 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0127 07:51:55.214092 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0127 07:51:55.221404 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0127 07:51:55.221608 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0127 07:51:55.221697 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0127 07:51:55.221768 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0127 07:51:55.221825 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0127 07:51:55.221877 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0127 07:51:55.221926 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0127 07:51:55.221414 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0127 07:51:55.222961 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-27T07:51:49Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f3424e1f77a68fac6a8ccd12e018ce28850817c99e72cef7edbff0941124c46b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c37afd8d2562157729aca3685d7abae2bb6b9e081c2b456706dde875fd762c07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c37afd8d2562157729aca3685d7abae2bb6b9e081c2b456706dde875fd762c07\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:51:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:35Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:31Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:31 crc kubenswrapper[4787]: I0127 07:52:31.637325 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:31 crc kubenswrapper[4787]: I0127 07:52:31.637391 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:31 crc kubenswrapper[4787]: I0127 07:52:31.637401 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:31 crc kubenswrapper[4787]: I0127 07:52:31.637422 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:31 crc kubenswrapper[4787]: I0127 07:52:31.637437 4787 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:31Z","lastTransitionTime":"2026-01-27T07:52:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:31 crc kubenswrapper[4787]: I0127 07:52:31.652813 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:31Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:31 crc kubenswrapper[4787]: I0127 07:52:31.665704 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-fghc7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6cac1bb0-6b6f-442f-9db4-a25d4f194bb1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://825d098d784c9e43aa1c5b9014dd290f1a23ddf0651dfd603c634682a8f05da2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c8zvj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:59Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-fghc7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:31Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:31 crc kubenswrapper[4787]: I0127 07:52:31.679044 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-rqwfc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fe88a860-6bb6-40ef-8ed7-c16f06fad8d3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f0dc7c2423b05f4eb857a5a6d8a5006cea055965d20a85e84659b369f9659eeb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-scmbf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:52:02Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-rqwfc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:31Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:31 crc kubenswrapper[4787]: I0127 07:52:31.700370 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-fqb6r" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0ff88955-7cd9-4af4-9e0e-79614a1d2994\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://47c65fabdecb2c24a7eeabdff7a89339f711ff64e9cb6516900be57a2753f89b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5e7833b50fe96981aa0ed63aa03b7078def1c809e60092a901a8b7ebaee78ad8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5e7833b50fe96981aa0ed63aa03b7078def1c809e60092a901a8b7ebaee78ad8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:52:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a0b5a15ba402c52fc6f3cd5d882f597c90ea8d1ab4e836f0e0998a301126f5a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a0b5a15ba402c52fc6f3cd5d882f597c90ea8d1ab4e836f0e0998a301126f5a6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:52:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:52:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://82bb64425ded7611030cccfa7a439b14e545bf83bc48c762df144eddc7d08487\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://82bb64425ded7611030cccfa7a439b14e545bf83bc48c762df144eddc7d08487\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:52:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:52:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9782b2a339d2dba1498f28ed3f7e6c4c2e747a282465311592f0fdb5863f823\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f9782b2a339d2dba1498f28ed3f7e6c4c2e747a282465311592f0fdb5863f823\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:52:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:52:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c8924a3ef645447151d91c506d105c9760f0dce0c568e9bf37ea76108a6352b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c8924a3ef645447151d91c506d105c9760f0dce0c568e9bf37ea76108a6352b2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:52:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f0da2ec606517cc9e0c2ed1d549c947db2eecb603f9bc9ba8bddbee0aad4710\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0f0da2ec606517cc9e0c2ed1d549c947db2eecb603f9bc9ba8bddbee0aad4710\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:52:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:52:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-fqb6r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:31Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:31 crc kubenswrapper[4787]: I0127 07:52:31.722539 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7279787f-4f29-4eae-a213-b7ea5d579b93\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://486c1b541ff6adc6a3cdab348fbdd7fbcf1c202c1a474e5e5db37bbcfb38fe06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://47800c35810b19642ee37e41f20900bef93d843d1d7e2219547dde7bf88cc7dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://85684c09dd546fc9a972aa45da092b22d794a5588140a451becfbe962be82b76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0aa5c186d3ea5f41540e7450b9ee5a5bbfe8359
762ab9aa219a4e4bb26fb0a94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9717409dfdeb0d26d9a1c28a651d1feddfd4aaa11a9bc7db7206fbf8c6400c22\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b0fb183cca1d2f6f6f5bd9c80107e85fe0f5f39ea985a036ba115836e45d7cc2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b0fb183cca1d2f6f6f5bd9c80107e85fe0f5f39ea985a036ba115836e45d7cc2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:51:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a9bb746c11b2a67e014815e16d6765a1c86a3d2c156cae83853881bdb988507c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a9bb746c11b2a67e014815e16d6765a1c86a3d2c156cae83853881bdb988507c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://1fa78137ccc23e32f6eb838abf3991ca3d99b8349721f1419c892a7f8795f55f\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1fa78137ccc23e32f6eb838abf3991ca3d99b8349721f1419c892a7f8795f55f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:51:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:35Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:31Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:31 crc kubenswrapper[4787]: I0127 07:52:31.738775 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7a18f3e6327858658093c392b8b7de18ec6a1085c08ae31ae2f2dc371555011a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:31Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:31 crc kubenswrapper[4787]: I0127 07:52:31.740824 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:31 crc kubenswrapper[4787]: I0127 07:52:31.740887 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:31 crc kubenswrapper[4787]: I0127 07:52:31.740923 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:31 crc kubenswrapper[4787]: I0127 07:52:31.741020 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:31 crc kubenswrapper[4787]: I0127 07:52:31.741065 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:31Z","lastTransitionTime":"2026-01-27T07:52:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:31 crc kubenswrapper[4787]: I0127 07:52:31.760967 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:31Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:31 crc kubenswrapper[4787]: I0127 07:52:31.789971 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:31Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:31 crc kubenswrapper[4787]: I0127 07:52:31.801450 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:31 crc kubenswrapper[4787]: I0127 07:52:31.801490 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:31 crc kubenswrapper[4787]: I0127 07:52:31.801501 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:31 crc kubenswrapper[4787]: I0127 07:52:31.801520 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:31 crc kubenswrapper[4787]: I0127 07:52:31.801532 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:31Z","lastTransitionTime":"2026-01-27T07:52:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:52:31 crc kubenswrapper[4787]: I0127 07:52:31.815880 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-rqjpz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e6f78168-0b0d-464d-b1c7-00bb9a69c0d1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e73805a73ffd2c2028fa682daaf20e295ecd2f2d7e24bb9b897883f18c3c514e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tz7b2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-rqjpz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:31Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:31 crc kubenswrapper[4787]: E0127 07:52:31.820082 4787 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T07:52:31Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:31Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T07:52:31Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:31Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T07:52:31Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:31Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T07:52:31Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:31Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"55ded10c-ad4c-443d-a571-c7a8f4a50b03\\\",\\\"systemUUID\\\":\\\"553a2493-323d-43ed-bfcf-44c5a8746c19\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:31Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:31 crc kubenswrapper[4787]: I0127 07:52:31.824244 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:31 crc kubenswrapper[4787]: I0127 07:52:31.824314 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 27 07:52:31 crc kubenswrapper[4787]: I0127 07:52:31.824328 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:31 crc kubenswrapper[4787]: I0127 07:52:31.824368 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:31 crc kubenswrapper[4787]: I0127 07:52:31.824381 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:31Z","lastTransitionTime":"2026-01-27T07:52:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:31 crc kubenswrapper[4787]: E0127 07:52:31.837244 4787 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T07:52:31Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:31Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T07:52:31Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:31Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T07:52:31Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:31Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T07:52:31Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:31Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"55ded10c-ad4c-443d-a571-c7a8f4a50b03\\\",\\\"systemUUID\\\":\\\"553a2493-323d-43ed-bfcf-44c5a8746c19\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:31Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:31 crc kubenswrapper[4787]: I0127 07:52:31.839448 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7642m" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fa44405c-042c-485a-ab6c-912dcd377751\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b5f54aa358ddf7d9fe76fd45c4e3332ba2ef1fa4da77c6f38ae29209c4339a80\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2abb29a77d0081b70495701e68f0a5a9b80656eab59ae8de10fab1450a1df49f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://108cdc389c2cbb9baeb538856e70ea1ca2bce5968eb4097700b3e71e5322258d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0bcc1a1d9f6e5bd0d8cf9b36441460debd839f92291531175eb05db5070136e8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fbc588f712cb0167029fc80f924f52841fb904738bbcf774c53ca15a2642ef9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc1a27dd4120a4eafd6bfbd908f9fdbc80e9b006afafb509399f2b657b129b2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://adb98705453ca5a2191ab57c0255b907d1e1d505962fddc38dd440a1d56d505f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://adb98705453ca5a2191ab57c0255b907d1e1d505962fddc38dd440a1d56d505f\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-27T07:52:30Z\\\",\\\"message\\\":\\\" 6533 obj_retry.go:365] Adding new object: *v1.Pod openshift-network-node-identity/network-node-identity-vrzqb\\\\nF0127 07:52:30.089457 6533 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:30Z is after 2025-08-24T17:21:41Z]\\\\nI0127 07:52:30.089470 6533 obj_retry.go:303] Retry object setup: *v1.Pod openshift-image-registry/node-ca-rqwfc\\\\nI0127 07:52:30.089462 6533 ovn.go:134] Ensuring zone local for Pod openshift-network-node-identity/network-node-identity-vrzqb in node crc\\\\nI0127 07:52:30.089487 6533 obj_retry.go:386] Retry successful for *v1.Pod openshift-network-node-identity/network-node-identity-vrzqb after 0 fai\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-27T07:52:29Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-7642m_openshift-ovn-kubernetes(fa44405c-042c-485a-ab6c-912dcd377751)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d46b5c512f7b14d97a53ce0355a7ceb08370d3e91ccef003b40386a9785df413\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://199ad62dee1b7ba9f8af9b2f3fcd77db94e8ba7dbfe2d011610c47358c17126b\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://199ad62dee1b7ba9f8af9b2f3fcd77db94e8ba7dbfe2d011610c47358c17126b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:52:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:59Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-7642m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:31Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:31 crc kubenswrapper[4787]: I0127 07:52:31.841264 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:31 crc kubenswrapper[4787]: I0127 07:52:31.841310 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:31 crc kubenswrapper[4787]: I0127 07:52:31.841326 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:31 crc kubenswrapper[4787]: I0127 07:52:31.841343 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:31 crc kubenswrapper[4787]: I0127 07:52:31.841354 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:31Z","lastTransitionTime":"2026-01-27T07:52:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:52:31 crc kubenswrapper[4787]: E0127 07:52:31.852265 4787 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T07:52:31Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:31Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T07:52:31Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:31Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T07:52:31Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:31Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T07:52:31Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:31Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"55ded10c-ad4c-443d-a571-c7a8f4a50b03\\\",\\\"systemUUID\\\":\\\"553a2493-323d-43ed-bfcf-44c5a8746c19\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:31Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:31 crc kubenswrapper[4787]: I0127 07:52:31.853078 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-vhss5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"643aaef9-e302-436b-943e-940480ef74fc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5cda08199034af06aa5966ebd72a8a553fe0c83acf73bbd42f4f7d6bb121b2e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8q989\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ea42ae75a943bc89609995381bc82366333131d7c7200a3ab758b9de239e3283\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8q989\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:52:12Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-vhss5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:31Z is after 2025-08-24T17:21:41Z" Jan 27 
07:52:31 crc kubenswrapper[4787]: I0127 07:52:31.855963 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:31 crc kubenswrapper[4787]: I0127 07:52:31.856012 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:31 crc kubenswrapper[4787]: I0127 07:52:31.856021 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:31 crc kubenswrapper[4787]: I0127 07:52:31.856041 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:31 crc kubenswrapper[4787]: I0127 07:52:31.856057 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:31Z","lastTransitionTime":"2026-01-27T07:52:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:31 crc kubenswrapper[4787]: I0127 07:52:31.863549 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-vws75" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3969f21f-ab36-49b4-9a9c-02cf19e65ad0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:13Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:13Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:13Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq2mg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq2mg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:52:13Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-vws75\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:31Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:31 crc kubenswrapper[4787]: E0127 07:52:31.869747 4787 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T07:52:31Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:31Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T07:52:31Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:31Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T07:52:31Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:31Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID 
available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T07:52:31Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:31Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056
b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951
},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"55ded10c-ad4c-443d-a571-c7a8f4a50b03\\\",\\\"systemUUID\\\":\\\"553a2493-323d-43ed-bfcf-44c5a8746c19\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate 
has expired or is not yet valid: current time 2026-01-27T07:52:31Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:31 crc kubenswrapper[4787]: I0127 07:52:31.873994 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:31 crc kubenswrapper[4787]: I0127 07:52:31.874066 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:31 crc kubenswrapper[4787]: I0127 07:52:31.874102 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:31 crc kubenswrapper[4787]: I0127 07:52:31.874127 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:31 crc kubenswrapper[4787]: I0127 07:52:31.874141 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:31Z","lastTransitionTime":"2026-01-27T07:52:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:31 crc kubenswrapper[4787]: E0127 07:52:31.886695 4787 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T07:52:31Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:31Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T07:52:31Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:31Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T07:52:31Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:31Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T07:52:31Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:31Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"55ded10c-ad4c-443d-a571-c7a8f4a50b03\\\",\\\"systemUUID\\\":\\\"553a2493-323d-43ed-bfcf-44c5a8746c19\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:31Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:31 crc kubenswrapper[4787]: E0127 07:52:31.886919 4787 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Jan 27 07:52:31 crc kubenswrapper[4787]: I0127 07:52:31.888888 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Jan 27 07:52:31 crc kubenswrapper[4787]: I0127 07:52:31.888933 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:31 crc kubenswrapper[4787]: I0127 07:52:31.888968 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:31 crc kubenswrapper[4787]: I0127 07:52:31.888989 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:31 crc kubenswrapper[4787]: I0127 07:52:31.888999 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:31Z","lastTransitionTime":"2026-01-27T07:52:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:31 crc kubenswrapper[4787]: I0127 07:52:31.992219 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:31 crc kubenswrapper[4787]: I0127 07:52:31.992266 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:31 crc kubenswrapper[4787]: I0127 07:52:31.992275 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:31 crc kubenswrapper[4787]: I0127 07:52:31.992294 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:31 crc kubenswrapper[4787]: I0127 07:52:31.992304 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:31Z","lastTransitionTime":"2026-01-27T07:52:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:52:32 crc kubenswrapper[4787]: I0127 07:52:32.062953 4787 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-03 20:10:41.302902958 +0000 UTC Jan 27 07:52:32 crc kubenswrapper[4787]: I0127 07:52:32.095663 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:32 crc kubenswrapper[4787]: I0127 07:52:32.095740 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:32 crc kubenswrapper[4787]: I0127 07:52:32.095764 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:32 crc kubenswrapper[4787]: I0127 07:52:32.095800 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:32 crc kubenswrapper[4787]: I0127 07:52:32.095820 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:32Z","lastTransitionTime":"2026-01-27T07:52:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:32 crc kubenswrapper[4787]: I0127 07:52:32.199030 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:32 crc kubenswrapper[4787]: I0127 07:52:32.199092 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:32 crc kubenswrapper[4787]: I0127 07:52:32.199111 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:32 crc kubenswrapper[4787]: I0127 07:52:32.199140 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:32 crc kubenswrapper[4787]: I0127 07:52:32.199160 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:32Z","lastTransitionTime":"2026-01-27T07:52:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:52:32 crc kubenswrapper[4787]: I0127 07:52:32.303196 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:32 crc kubenswrapper[4787]: I0127 07:52:32.303288 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:32 crc kubenswrapper[4787]: I0127 07:52:32.303324 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:32 crc kubenswrapper[4787]: I0127 07:52:32.303358 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:32 crc kubenswrapper[4787]: I0127 07:52:32.303382 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:32Z","lastTransitionTime":"2026-01-27T07:52:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:32 crc kubenswrapper[4787]: I0127 07:52:32.406758 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:32 crc kubenswrapper[4787]: I0127 07:52:32.406833 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:32 crc kubenswrapper[4787]: I0127 07:52:32.406850 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:32 crc kubenswrapper[4787]: I0127 07:52:32.406882 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:32 crc kubenswrapper[4787]: I0127 07:52:32.406909 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:32Z","lastTransitionTime":"2026-01-27T07:52:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:32 crc kubenswrapper[4787]: I0127 07:52:32.509594 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:32 crc kubenswrapper[4787]: I0127 07:52:32.509658 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:32 crc kubenswrapper[4787]: I0127 07:52:32.509675 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:32 crc kubenswrapper[4787]: I0127 07:52:32.509699 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:32 crc kubenswrapper[4787]: I0127 07:52:32.509724 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:32Z","lastTransitionTime":"2026-01-27T07:52:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:52:32 crc kubenswrapper[4787]: I0127 07:52:32.612487 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:32 crc kubenswrapper[4787]: I0127 07:52:32.612537 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:32 crc kubenswrapper[4787]: I0127 07:52:32.612574 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:32 crc kubenswrapper[4787]: I0127 07:52:32.612598 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:32 crc kubenswrapper[4787]: I0127 07:52:32.612614 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:32Z","lastTransitionTime":"2026-01-27T07:52:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:32 crc kubenswrapper[4787]: I0127 07:52:32.716735 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:32 crc kubenswrapper[4787]: I0127 07:52:32.716803 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:32 crc kubenswrapper[4787]: I0127 07:52:32.716823 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:32 crc kubenswrapper[4787]: I0127 07:52:32.716852 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:32 crc kubenswrapper[4787]: I0127 07:52:32.716872 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:32Z","lastTransitionTime":"2026-01-27T07:52:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:32 crc kubenswrapper[4787]: I0127 07:52:32.820779 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:32 crc kubenswrapper[4787]: I0127 07:52:32.820850 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:32 crc kubenswrapper[4787]: I0127 07:52:32.820869 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:32 crc kubenswrapper[4787]: I0127 07:52:32.820901 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:32 crc kubenswrapper[4787]: I0127 07:52:32.820921 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:32Z","lastTransitionTime":"2026-01-27T07:52:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:52:32 crc kubenswrapper[4787]: I0127 07:52:32.924406 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:32 crc kubenswrapper[4787]: I0127 07:52:32.924476 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:32 crc kubenswrapper[4787]: I0127 07:52:32.924490 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:32 crc kubenswrapper[4787]: I0127 07:52:32.924511 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:32 crc kubenswrapper[4787]: I0127 07:52:32.925323 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:32Z","lastTransitionTime":"2026-01-27T07:52:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:33 crc kubenswrapper[4787]: I0127 07:52:33.027921 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:33 crc kubenswrapper[4787]: I0127 07:52:33.028002 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:33 crc kubenswrapper[4787]: I0127 07:52:33.028024 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:33 crc kubenswrapper[4787]: I0127 07:52:33.028049 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:33 crc kubenswrapper[4787]: I0127 07:52:33.028064 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:33Z","lastTransitionTime":"2026-01-27T07:52:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:33 crc kubenswrapper[4787]: I0127 07:52:33.064988 4787 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-22 06:50:55.644590615 +0000 UTC Jan 27 07:52:33 crc kubenswrapper[4787]: I0127 07:52:33.076860 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 27 07:52:33 crc kubenswrapper[4787]: I0127 07:52:33.076943 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 27 07:52:33 crc kubenswrapper[4787]: I0127 07:52:33.076975 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 27 07:52:33 crc kubenswrapper[4787]: I0127 07:52:33.077002 4787 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-vws75" Jan 27 07:52:33 crc kubenswrapper[4787]: E0127 07:52:33.077050 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 27 07:52:33 crc kubenswrapper[4787]: E0127 07:52:33.077303 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 27 07:52:33 crc kubenswrapper[4787]: E0127 07:52:33.077470 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-vws75" podUID="3969f21f-ab36-49b4-9a9c-02cf19e65ad0" Jan 27 07:52:33 crc kubenswrapper[4787]: E0127 07:52:33.077600 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 27 07:52:33 crc kubenswrapper[4787]: I0127 07:52:33.131869 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:33 crc kubenswrapper[4787]: I0127 07:52:33.131945 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:33 crc kubenswrapper[4787]: I0127 07:52:33.131966 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:33 crc kubenswrapper[4787]: I0127 07:52:33.131995 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:33 crc kubenswrapper[4787]: I0127 07:52:33.132014 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:33Z","lastTransitionTime":"2026-01-27T07:52:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:52:33 crc kubenswrapper[4787]: I0127 07:52:33.235237 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:33 crc kubenswrapper[4787]: I0127 07:52:33.235763 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:33 crc kubenswrapper[4787]: I0127 07:52:33.235802 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:33 crc kubenswrapper[4787]: I0127 07:52:33.235827 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:33 crc kubenswrapper[4787]: I0127 07:52:33.235843 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:33Z","lastTransitionTime":"2026-01-27T07:52:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:33 crc kubenswrapper[4787]: I0127 07:52:33.340118 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:33 crc kubenswrapper[4787]: I0127 07:52:33.340176 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:33 crc kubenswrapper[4787]: I0127 07:52:33.340196 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:33 crc kubenswrapper[4787]: I0127 07:52:33.340227 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:33 crc kubenswrapper[4787]: I0127 07:52:33.340251 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:33Z","lastTransitionTime":"2026-01-27T07:52:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:33 crc kubenswrapper[4787]: I0127 07:52:33.443346 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:33 crc kubenswrapper[4787]: I0127 07:52:33.443406 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:33 crc kubenswrapper[4787]: I0127 07:52:33.443425 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:33 crc kubenswrapper[4787]: I0127 07:52:33.443455 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:33 crc kubenswrapper[4787]: I0127 07:52:33.443474 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:33Z","lastTransitionTime":"2026-01-27T07:52:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:52:33 crc kubenswrapper[4787]: I0127 07:52:33.547169 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:33 crc kubenswrapper[4787]: I0127 07:52:33.547251 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:33 crc kubenswrapper[4787]: I0127 07:52:33.547278 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:33 crc kubenswrapper[4787]: I0127 07:52:33.547313 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:33 crc kubenswrapper[4787]: I0127 07:52:33.547339 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:33Z","lastTransitionTime":"2026-01-27T07:52:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:33 crc kubenswrapper[4787]: I0127 07:52:33.650880 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:33 crc kubenswrapper[4787]: I0127 07:52:33.650946 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:33 crc kubenswrapper[4787]: I0127 07:52:33.650968 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:33 crc kubenswrapper[4787]: I0127 07:52:33.650999 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:33 crc kubenswrapper[4787]: I0127 07:52:33.651021 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:33Z","lastTransitionTime":"2026-01-27T07:52:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:33 crc kubenswrapper[4787]: I0127 07:52:33.755053 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:33 crc kubenswrapper[4787]: I0127 07:52:33.755112 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:33 crc kubenswrapper[4787]: I0127 07:52:33.755129 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:33 crc kubenswrapper[4787]: I0127 07:52:33.755158 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:33 crc kubenswrapper[4787]: I0127 07:52:33.755177 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:33Z","lastTransitionTime":"2026-01-27T07:52:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:52:33 crc kubenswrapper[4787]: I0127 07:52:33.857986 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:33 crc kubenswrapper[4787]: I0127 07:52:33.858031 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:33 crc kubenswrapper[4787]: I0127 07:52:33.858045 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:33 crc kubenswrapper[4787]: I0127 07:52:33.858070 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:33 crc kubenswrapper[4787]: I0127 07:52:33.858086 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:33Z","lastTransitionTime":"2026-01-27T07:52:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:33 crc kubenswrapper[4787]: I0127 07:52:33.962426 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:33 crc kubenswrapper[4787]: I0127 07:52:33.962542 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:33 crc kubenswrapper[4787]: I0127 07:52:33.962609 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:33 crc kubenswrapper[4787]: I0127 07:52:33.962645 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:33 crc kubenswrapper[4787]: I0127 07:52:33.962676 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:33Z","lastTransitionTime":"2026-01-27T07:52:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:52:34 crc kubenswrapper[4787]: I0127 07:52:34.065214 4787 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-07 14:10:28.934992815 +0000 UTC Jan 27 07:52:34 crc kubenswrapper[4787]: I0127 07:52:34.065577 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:34 crc kubenswrapper[4787]: I0127 07:52:34.066023 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:34 crc kubenswrapper[4787]: I0127 07:52:34.066103 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:34 crc kubenswrapper[4787]: I0127 07:52:34.066190 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:34 crc kubenswrapper[4787]: I0127 07:52:34.066326 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:34Z","lastTransitionTime":"2026-01-27T07:52:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:34 crc kubenswrapper[4787]: I0127 07:52:34.169475 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:34 crc kubenswrapper[4787]: I0127 07:52:34.169530 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:34 crc kubenswrapper[4787]: I0127 07:52:34.169541 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:34 crc kubenswrapper[4787]: I0127 07:52:34.169576 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:34 crc kubenswrapper[4787]: I0127 07:52:34.169588 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:34Z","lastTransitionTime":"2026-01-27T07:52:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:52:34 crc kubenswrapper[4787]: I0127 07:52:34.273023 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:34 crc kubenswrapper[4787]: I0127 07:52:34.273088 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:34 crc kubenswrapper[4787]: I0127 07:52:34.273107 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:34 crc kubenswrapper[4787]: I0127 07:52:34.273131 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:34 crc kubenswrapper[4787]: I0127 07:52:34.273145 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:34Z","lastTransitionTime":"2026-01-27T07:52:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:34 crc kubenswrapper[4787]: I0127 07:52:34.377590 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:34 crc kubenswrapper[4787]: I0127 07:52:34.378027 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:34 crc kubenswrapper[4787]: I0127 07:52:34.378162 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:34 crc kubenswrapper[4787]: I0127 07:52:34.378296 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:34 crc kubenswrapper[4787]: I0127 07:52:34.378431 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:34Z","lastTransitionTime":"2026-01-27T07:52:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:34 crc kubenswrapper[4787]: I0127 07:52:34.481650 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:34 crc kubenswrapper[4787]: I0127 07:52:34.481742 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:34 crc kubenswrapper[4787]: I0127 07:52:34.481756 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:34 crc kubenswrapper[4787]: I0127 07:52:34.481784 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:34 crc kubenswrapper[4787]: I0127 07:52:34.481800 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:34Z","lastTransitionTime":"2026-01-27T07:52:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:52:34 crc kubenswrapper[4787]: I0127 07:52:34.585896 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:34 crc kubenswrapper[4787]: I0127 07:52:34.585968 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:34 crc kubenswrapper[4787]: I0127 07:52:34.585987 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:34 crc kubenswrapper[4787]: I0127 07:52:34.586024 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:34 crc kubenswrapper[4787]: I0127 07:52:34.586050 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:34Z","lastTransitionTime":"2026-01-27T07:52:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:34 crc kubenswrapper[4787]: I0127 07:52:34.689124 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:34 crc kubenswrapper[4787]: I0127 07:52:34.689184 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:34 crc kubenswrapper[4787]: I0127 07:52:34.689195 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:34 crc kubenswrapper[4787]: I0127 07:52:34.689212 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:34 crc kubenswrapper[4787]: I0127 07:52:34.689229 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:34Z","lastTransitionTime":"2026-01-27T07:52:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:34 crc kubenswrapper[4787]: I0127 07:52:34.792510 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:34 crc kubenswrapper[4787]: I0127 07:52:34.792598 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:34 crc kubenswrapper[4787]: I0127 07:52:34.792618 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:34 crc kubenswrapper[4787]: I0127 07:52:34.792645 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:34 crc kubenswrapper[4787]: I0127 07:52:34.792658 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:34Z","lastTransitionTime":"2026-01-27T07:52:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:52:34 crc kubenswrapper[4787]: I0127 07:52:34.895334 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:34 crc kubenswrapper[4787]: I0127 07:52:34.895425 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:34 crc kubenswrapper[4787]: I0127 07:52:34.895450 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:34 crc kubenswrapper[4787]: I0127 07:52:34.895485 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:34 crc kubenswrapper[4787]: I0127 07:52:34.895510 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:34Z","lastTransitionTime":"2026-01-27T07:52:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:34 crc kubenswrapper[4787]: I0127 07:52:34.998838 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:34 crc kubenswrapper[4787]: I0127 07:52:34.998912 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:34 crc kubenswrapper[4787]: I0127 07:52:34.998925 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:34 crc kubenswrapper[4787]: I0127 07:52:34.998944 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:34 crc kubenswrapper[4787]: I0127 07:52:34.998956 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:34Z","lastTransitionTime":"2026-01-27T07:52:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:35 crc kubenswrapper[4787]: I0127 07:52:35.066418 4787 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-10 15:14:41.364249672 +0000 UTC Jan 27 07:52:35 crc kubenswrapper[4787]: I0127 07:52:35.076175 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 27 07:52:35 crc kubenswrapper[4787]: I0127 07:52:35.076175 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-vws75" Jan 27 07:52:35 crc kubenswrapper[4787]: I0127 07:52:35.076185 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 27 07:52:35 crc kubenswrapper[4787]: I0127 07:52:35.076218 4787 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 27 07:52:35 crc kubenswrapper[4787]: E0127 07:52:35.076494 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 27 07:52:35 crc kubenswrapper[4787]: E0127 07:52:35.076644 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 27 07:52:35 crc kubenswrapper[4787]: E0127 07:52:35.076916 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-vws75" podUID="3969f21f-ab36-49b4-9a9c-02cf19e65ad0" Jan 27 07:52:35 crc kubenswrapper[4787]: E0127 07:52:35.077145 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 27 07:52:35 crc kubenswrapper[4787]: I0127 07:52:35.096609 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-rqjpz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e6f78168-0b0d-464d-b1c7-00bb9a69c0d1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e73805a73ffd2c2028fa682daaf20e295ecd2f2d7e24bb9b897883f18c3c514e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tz7b2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.
126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-rqjpz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:35Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:35 crc kubenswrapper[4787]: I0127 07:52:35.101467 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:35 crc kubenswrapper[4787]: I0127 07:52:35.101513 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:35 crc kubenswrapper[4787]: I0127 07:52:35.101544 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:35 crc kubenswrapper[4787]: I0127 07:52:35.101918 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:35 crc kubenswrapper[4787]: I0127 07:52:35.101961 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:35Z","lastTransitionTime":"2026-01-27T07:52:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:35 crc kubenswrapper[4787]: I0127 07:52:35.115662 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-fqb6r" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0ff88955-7cd9-4af4-9e0e-79614a1d2994\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://47c65fabdecb2c24a7eeabdff7a89339f711ff64e9cb6516900be57a2753f89b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5e7833b50fe96981aa0ed63aa03b7078def1c809e60092a901a8b7ebaee78ad8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5e7833b50fe96981aa0ed63aa03b7078def1c809e60092a901a8b7ebaee78ad8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:52:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a0b5a15ba402c52fc6f3cd5d882f597c90ea8d1ab4e836f0e0998a301126f5a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a0b5a15ba402c52fc6f3cd5d882f597c90ea8d1ab4e836f0e0998a301126f5a6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:52:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:52:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://82bb64425ded7611030cccfa7a439b14e545bf83bc48c762df144eddc7d08487\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://82bb64425ded7611030cccfa7a439b14e545bf83bc48c762df144eddc7d08487\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:52:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:52:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9782b2a339d2dba1498f28ed3f7e6c4c2e747a282465311592f0fdb5863f823\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f9782b2a339d2dba1498f28ed3f7e6c4c2e747a282465311592f0fdb5863f823\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:52:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:52:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c8924a3ef645447151d91c506d105c9760f0dce0c568e9bf37ea76108a6352b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c8924a3ef645447151d91c506d105c9760f0dce0c568e9bf37ea76108a6352b2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:52:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f0da2ec606517cc9e0c2ed1d549c947db2eecb603f9bc9ba8bddbee0aad4710\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0f0da2ec606517cc9e0c2ed1d549c947db2eecb603f9bc9ba8bddbee0aad4710\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:52:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:52:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-fqb6r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:35Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:35 crc kubenswrapper[4787]: I0127 07:52:35.139875 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7279787f-4f29-4eae-a213-b7ea5d579b93\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://486c1b541ff6adc6a3cdab348fbdd7fbcf1c202c1a474e5e5db37bbcfb38fe06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://47800c35810b19642ee37e41f20900bef93d843d1d7e2219547dde7bf88cc7dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://85684c09dd546fc9a972aa45da092b22d794a5588140a451becfbe962be82b76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0aa5c186d3ea5f41540e7450b9ee5a5bbfe8359
762ab9aa219a4e4bb26fb0a94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9717409dfdeb0d26d9a1c28a651d1feddfd4aaa11a9bc7db7206fbf8c6400c22\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b0fb183cca1d2f6f6f5bd9c80107e85fe0f5f39ea985a036ba115836e45d7cc2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b0fb183cca1d2f6f6f5bd9c80107e85fe0f5f39ea985a036ba115836e45d7cc2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:51:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a9bb746c11b2a67e014815e16d6765a1c86a3d2c156cae83853881bdb988507c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a9bb746c11b2a67e014815e16d6765a1c86a3d2c156cae83853881bdb988507c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://1fa78137ccc23e32f6eb838abf3991ca3d99b8349721f1419c892a7f8795f55f\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1fa78137ccc23e32f6eb838abf3991ca3d99b8349721f1419c892a7f8795f55f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:51:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:35Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:35Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:35 crc kubenswrapper[4787]: I0127 07:52:35.155901 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7a18f3e6327858658093c392b8b7de18ec6a1085c08ae31ae2f2dc371555011a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:35Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:35 crc kubenswrapper[4787]: I0127 07:52:35.172988 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:35Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:35 crc kubenswrapper[4787]: I0127 07:52:35.189403 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:35Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:35 crc kubenswrapper[4787]: I0127 07:52:35.204359 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:35 crc kubenswrapper[4787]: I0127 07:52:35.204464 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:35 crc kubenswrapper[4787]: I0127 07:52:35.204477 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:35 crc kubenswrapper[4787]: I0127 07:52:35.204503 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:35 crc kubenswrapper[4787]: I0127 07:52:35.204520 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:35Z","lastTransitionTime":"2026-01-27T07:52:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:52:35 crc kubenswrapper[4787]: I0127 07:52:35.212001 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7642m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fa44405c-042c-485a-ab6c-912dcd377751\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b5f54aa358ddf7d9fe76fd45c4e3332ba2ef1fa4da77c6f38ae29209c4339a80\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2abb29a77d0081b70495701e68f0a5a9b80656eab59ae8de10fab1450a1df49f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://108cdc389c2cbb9baeb538856e70ea1ca2bce5968eb4097700b3e71e5322258d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0bcc1a1d9f6e5bd0d8cf9b36441460debd839f92291531175eb05db5070136e8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fbc588f712cb0167029fc80f924f52841fb904738bbcf774c53ca15a2642ef9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc1a27dd4120a4eafd6bfbd908f9fdbc80e9b006afafb509399f2b657b129b2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://adb98705453ca5a2191ab57c0255b907d1e1d505962fddc38dd440a1d56d505f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://adb98705453ca5a2191ab57c0255b907d1e1d505962fddc38dd440a1d56d505f\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-27T07:52:30Z\\\",\\\"message\\\":\\\" 6533 obj_retry.go:365] Adding new object: *v1.Pod openshift-network-node-identity/network-node-identity-vrzqb\\\\nF0127 07:52:30.089457 6533 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:30Z is after 2025-08-24T17:21:41Z]\\\\nI0127 07:52:30.089470 6533 obj_retry.go:303] Retry object setup: *v1.Pod openshift-image-registry/node-ca-rqwfc\\\\nI0127 07:52:30.089462 6533 ovn.go:134] Ensuring zone local for Pod openshift-network-node-identity/network-node-identity-vrzqb in node crc\\\\nI0127 07:52:30.089487 6533 obj_retry.go:386] Retry successful for *v1.Pod openshift-network-node-identity/network-node-identity-vrzqb after 0 fai\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-27T07:52:29Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-7642m_openshift-ovn-kubernetes(fa44405c-042c-485a-ab6c-912dcd377751)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d46b5c512f7b14d97a53ce0355a7ceb08370d3e91ccef003b40386a9785df413\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://199ad62dee1b7ba9f8af9b2f3fcd77db94e8ba7dbfe2d011610c47358c17126b\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://199ad62dee1b7ba9f8af9b2f3fcd77db94e8ba7dbfe2d011610c47358c17126b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:52:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:59Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-7642m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:35Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:35 crc kubenswrapper[4787]: I0127 07:52:35.226770 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-vhss5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"643aaef9-e302-436b-943e-940480ef74fc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5cda08199034af06aa5966ebd72a8a553fe0c83acf73bbd42f4f7d6bb121b2e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8q989
\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ea42ae75a943bc89609995381bc82366333131d7c7200a3ab758b9de239e3283\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8q989\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:52:12Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-vhss5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:35Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:35 crc kubenswrapper[4787]: I0127 07:52:35.240485 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-vws75" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3969f21f-ab36-49b4-9a9c-02cf19e65ad0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:13Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:13Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:13Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq2mg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq2mg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:52:13Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-vws75\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:35Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:35 crc kubenswrapper[4787]: I0127 07:52:35.254867 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6d28aae4-3359-4d7e-8862-8db4b17a3403\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d820bf9fe1c788b0bb17535226d96cd310fc188148091fa8b4186dd43baa5f75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d407da23fc5babf6cec063fb31861fd8ac0f456a44746a83740c61c1e53a436c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0509b185e88f56d56bc6f8429a620d74855d938f1206d6c9193d8e93fc12ae91\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec120e647d27b70e3066a00046d12258db3162ab9beef75b4079d546c2533aaf\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ec120e647d27b70e3066a00046d12258db3162ab9beef75b4079d546c2533aaf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:51:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:35Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:35Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:35 crc kubenswrapper[4787]: I0127 07:52:35.272649 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2974b5846d4b9e95a3eab849f160c5b33393ff6b1bf1275bc6476ec80807d632\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4426d84c4375c7c659c77049bbd6a720bf004fa5e7780b1e8739a2620c3667f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":t
rue,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:35Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:35 crc kubenswrapper[4787]: I0127 07:52:35.287725 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1d5a64cdfd5f8aff93294f92aebc9ccf8fa2a4063e347fce2e35604d27651d9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:35Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:35 crc kubenswrapper[4787]: I0127 07:52:35.304853 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-q4fh5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f051e184-acac-47cf-9e04-9df648288715\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cf36b99a3389bdbbca8142539870671f6be61df22503df198f6255937e619950\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zvg7g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8829867d627dfed47b988a93a9ac3e381c0bf59794b65f8e1c1e821838477748\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zvg7g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:59Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-q4fh5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:35Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:35 crc kubenswrapper[4787]: I0127 07:52:35.307749 4787 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:35 crc kubenswrapper[4787]: I0127 07:52:35.307795 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:35 crc kubenswrapper[4787]: I0127 07:52:35.307814 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:35 crc kubenswrapper[4787]: I0127 07:52:35.307843 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:35 crc kubenswrapper[4787]: I0127 07:52:35.307863 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:35Z","lastTransitionTime":"2026-01-27T07:52:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:35 crc kubenswrapper[4787]: I0127 07:52:35.319103 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-rqwfc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fe88a860-6bb6-40ef-8ed7-c16f06fad8d3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f0dc7c2423b05f4eb857a5a6d8a5006cea055965d20a85e84659b369f9659eeb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-scmbf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:52:02Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-rqwfc\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:35Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:35 crc kubenswrapper[4787]: I0127 07:52:35.331516 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9bef4a57-904d-47b1-baa1-224c5c9f2b1a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://99c0fb5499a4bed619d003da60f83076e413fc5bda47a12d42770f567deeeec8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c114c2274358e777b240765cca81ea54a2e334002317a86595b7bfec95a11fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c470d3edd878f69c48f1457780557dd56300c0fe69342c479f1922dca856bceb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"st
arted\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bba5d04b33d62090867842211184f53bc23cf78b90a4c6709792639b74d1cf0d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:35Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:35Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:35 crc kubenswrapper[4787]: I0127 07:52:35.353267 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"60eef58e-b2eb-43d9-a499-317083a89ca3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://35e90c0899d9ae9ea7cce3f5904955f1f3a80e147efbd766e9ed3b34014f40df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://53dd18ffb0f39abccd8edc121977448f4c491f6be9acee866dde8cbeb7c52ab5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://af5452f3980a8a4614721cb60fffffb3b1f3f5d8d82928249857ed2dd3fb5986\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://215bbc991c576293e40d9ba239e697bfad702383af70483ddf0acea311b4ae28\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://879d855210b326ad3cbb964024c0b48b7373519deae8b974b9f5864416c15ef3\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"message\\\":\\\"le observer\\\\nW0127 07:51:54.984485 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0127 07:51:54.984680 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0127 07:51:54.985504 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2155430209/tls.crt::/tmp/serving-cert-2155430209/tls.key\\\\\\\"\\\\nI0127 07:51:55.207010 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0127 07:51:55.214027 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0127 07:51:55.214056 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0127 07:51:55.214085 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0127 07:51:55.214092 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0127 07:51:55.221404 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0127 07:51:55.221608 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0127 07:51:55.221697 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0127 07:51:55.221768 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0127 07:51:55.221825 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0127 07:51:55.221877 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0127 07:51:55.221926 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0127 07:51:55.221414 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0127 07:51:55.222961 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-27T07:51:49Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f3424e1f77a68fac6a8ccd12e018ce28850817c99e72cef7edbff0941124c46b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c37afd8d2562157729aca3685d7abae2bb6b9e081c2b456706dde875fd762c07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c37afd8d2562157729aca3685d7abae2bb6b9e081c2b456706dde875fd762c07\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:51:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:35Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:35Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:35 crc kubenswrapper[4787]: I0127 07:52:35.372228 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:35Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:35 crc kubenswrapper[4787]: I0127 07:52:35.388820 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-fghc7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6cac1bb0-6b6f-442f-9db4-a25d4f194bb1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://825d098d784c9e43aa1c5b9014dd290f1a23ddf0651dfd603c634682a8f05da2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c8zvj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:59Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-fghc7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:35Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:35 crc kubenswrapper[4787]: I0127 07:52:35.411087 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:35 crc kubenswrapper[4787]: I0127 07:52:35.411149 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:35 crc kubenswrapper[4787]: I0127 07:52:35.411169 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:35 crc kubenswrapper[4787]: I0127 07:52:35.411196 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:35 crc kubenswrapper[4787]: I0127 07:52:35.411216 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:35Z","lastTransitionTime":"2026-01-27T07:52:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:35 crc kubenswrapper[4787]: I0127 07:52:35.513859 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:35 crc kubenswrapper[4787]: I0127 07:52:35.513889 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:35 crc kubenswrapper[4787]: I0127 07:52:35.513897 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:35 crc kubenswrapper[4787]: I0127 07:52:35.513912 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:35 crc kubenswrapper[4787]: I0127 07:52:35.513924 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:35Z","lastTransitionTime":"2026-01-27T07:52:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:35 crc kubenswrapper[4787]: I0127 07:52:35.617406 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:35 crc kubenswrapper[4787]: I0127 07:52:35.617443 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:35 crc kubenswrapper[4787]: I0127 07:52:35.617454 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:35 crc kubenswrapper[4787]: I0127 07:52:35.617469 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:35 crc kubenswrapper[4787]: I0127 07:52:35.617478 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:35Z","lastTransitionTime":"2026-01-27T07:52:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:52:35 crc kubenswrapper[4787]: I0127 07:52:35.720472 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:35 crc kubenswrapper[4787]: I0127 07:52:35.720509 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:35 crc kubenswrapper[4787]: I0127 07:52:35.720518 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:35 crc kubenswrapper[4787]: I0127 07:52:35.720531 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:35 crc kubenswrapper[4787]: I0127 07:52:35.720542 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:35Z","lastTransitionTime":"2026-01-27T07:52:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:35 crc kubenswrapper[4787]: I0127 07:52:35.823420 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:35 crc kubenswrapper[4787]: I0127 07:52:35.823456 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:35 crc kubenswrapper[4787]: I0127 07:52:35.823464 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:35 crc kubenswrapper[4787]: I0127 07:52:35.823478 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:35 crc kubenswrapper[4787]: I0127 07:52:35.823487 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:35Z","lastTransitionTime":"2026-01-27T07:52:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:35 crc kubenswrapper[4787]: I0127 07:52:35.926593 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:35 crc kubenswrapper[4787]: I0127 07:52:35.926646 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:35 crc kubenswrapper[4787]: I0127 07:52:35.926659 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:35 crc kubenswrapper[4787]: I0127 07:52:35.926676 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:35 crc kubenswrapper[4787]: I0127 07:52:35.926688 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:35Z","lastTransitionTime":"2026-01-27T07:52:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:52:36 crc kubenswrapper[4787]: I0127 07:52:36.029980 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:36 crc kubenswrapper[4787]: I0127 07:52:36.030026 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:36 crc kubenswrapper[4787]: I0127 07:52:36.030037 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:36 crc kubenswrapper[4787]: I0127 07:52:36.030056 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:36 crc kubenswrapper[4787]: I0127 07:52:36.030071 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:36Z","lastTransitionTime":"2026-01-27T07:52:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:36 crc kubenswrapper[4787]: I0127 07:52:36.066838 4787 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-31 02:42:02.422486016 +0000 UTC Jan 27 07:52:36 crc kubenswrapper[4787]: I0127 07:52:36.133137 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:36 crc kubenswrapper[4787]: I0127 07:52:36.133188 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:36 crc kubenswrapper[4787]: I0127 07:52:36.133202 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:36 crc kubenswrapper[4787]: I0127 07:52:36.133239 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:36 crc kubenswrapper[4787]: I0127 07:52:36.133253 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:36Z","lastTransitionTime":"2026-01-27T07:52:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:52:36 crc kubenswrapper[4787]: I0127 07:52:36.236873 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:36 crc kubenswrapper[4787]: I0127 07:52:36.236919 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:36 crc kubenswrapper[4787]: I0127 07:52:36.236932 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:36 crc kubenswrapper[4787]: I0127 07:52:36.236954 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:36 crc kubenswrapper[4787]: I0127 07:52:36.236967 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:36Z","lastTransitionTime":"2026-01-27T07:52:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:36 crc kubenswrapper[4787]: I0127 07:52:36.340128 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:36 crc kubenswrapper[4787]: I0127 07:52:36.340186 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:36 crc kubenswrapper[4787]: I0127 07:52:36.340198 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:36 crc kubenswrapper[4787]: I0127 07:52:36.340222 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:36 crc kubenswrapper[4787]: I0127 07:52:36.340235 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:36Z","lastTransitionTime":"2026-01-27T07:52:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:36 crc kubenswrapper[4787]: I0127 07:52:36.443198 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:36 crc kubenswrapper[4787]: I0127 07:52:36.443281 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:36 crc kubenswrapper[4787]: I0127 07:52:36.443317 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:36 crc kubenswrapper[4787]: I0127 07:52:36.443353 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:36 crc kubenswrapper[4787]: I0127 07:52:36.443376 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:36Z","lastTransitionTime":"2026-01-27T07:52:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:52:36 crc kubenswrapper[4787]: I0127 07:52:36.546739 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:36 crc kubenswrapper[4787]: I0127 07:52:36.546827 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:36 crc kubenswrapper[4787]: I0127 07:52:36.546842 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:36 crc kubenswrapper[4787]: I0127 07:52:36.546865 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:36 crc kubenswrapper[4787]: I0127 07:52:36.546906 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:36Z","lastTransitionTime":"2026-01-27T07:52:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:36 crc kubenswrapper[4787]: I0127 07:52:36.649894 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:36 crc kubenswrapper[4787]: I0127 07:52:36.650014 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:36 crc kubenswrapper[4787]: I0127 07:52:36.650065 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:36 crc kubenswrapper[4787]: I0127 07:52:36.650086 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:36 crc kubenswrapper[4787]: I0127 07:52:36.650099 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:36Z","lastTransitionTime":"2026-01-27T07:52:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:36 crc kubenswrapper[4787]: I0127 07:52:36.752626 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:36 crc kubenswrapper[4787]: I0127 07:52:36.752734 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:36 crc kubenswrapper[4787]: I0127 07:52:36.752758 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:36 crc kubenswrapper[4787]: I0127 07:52:36.752819 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:36 crc kubenswrapper[4787]: I0127 07:52:36.752839 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:36Z","lastTransitionTime":"2026-01-27T07:52:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:52:36 crc kubenswrapper[4787]: I0127 07:52:36.856456 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:36 crc kubenswrapper[4787]: I0127 07:52:36.856502 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:36 crc kubenswrapper[4787]: I0127 07:52:36.856512 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:36 crc kubenswrapper[4787]: I0127 07:52:36.856532 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:36 crc kubenswrapper[4787]: I0127 07:52:36.856543 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:36Z","lastTransitionTime":"2026-01-27T07:52:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:36 crc kubenswrapper[4787]: I0127 07:52:36.960260 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:36 crc kubenswrapper[4787]: I0127 07:52:36.960314 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:36 crc kubenswrapper[4787]: I0127 07:52:36.960325 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:36 crc kubenswrapper[4787]: I0127 07:52:36.960346 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:36 crc kubenswrapper[4787]: I0127 07:52:36.960357 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:36Z","lastTransitionTime":"2026-01-27T07:52:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:37 crc kubenswrapper[4787]: I0127 07:52:37.063453 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:37 crc kubenswrapper[4787]: I0127 07:52:37.063503 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:37 crc kubenswrapper[4787]: I0127 07:52:37.063520 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:37 crc kubenswrapper[4787]: I0127 07:52:37.063545 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:37 crc kubenswrapper[4787]: I0127 07:52:37.063594 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:37Z","lastTransitionTime":"2026-01-27T07:52:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:52:37 crc kubenswrapper[4787]: I0127 07:52:37.067018 4787 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-26 03:03:35.95226279 +0000 UTC Jan 27 07:52:37 crc kubenswrapper[4787]: I0127 07:52:37.076502 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 27 07:52:37 crc kubenswrapper[4787]: I0127 07:52:37.076524 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 27 07:52:37 crc kubenswrapper[4787]: I0127 07:52:37.076665 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-vws75" Jan 27 07:52:37 crc kubenswrapper[4787]: E0127 07:52:37.076744 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 27 07:52:37 crc kubenswrapper[4787]: I0127 07:52:37.076799 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 27 07:52:37 crc kubenswrapper[4787]: E0127 07:52:37.076989 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 27 07:52:37 crc kubenswrapper[4787]: E0127 07:52:37.077129 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-vws75" podUID="3969f21f-ab36-49b4-9a9c-02cf19e65ad0" Jan 27 07:52:37 crc kubenswrapper[4787]: E0127 07:52:37.077322 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 27 07:52:37 crc kubenswrapper[4787]: I0127 07:52:37.167032 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:37 crc kubenswrapper[4787]: I0127 07:52:37.167088 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:37 crc kubenswrapper[4787]: I0127 07:52:37.167098 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:37 crc kubenswrapper[4787]: I0127 07:52:37.167115 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:37 crc kubenswrapper[4787]: I0127 07:52:37.167126 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:37Z","lastTransitionTime":"2026-01-27T07:52:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:37 crc kubenswrapper[4787]: I0127 07:52:37.270879 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:37 crc kubenswrapper[4787]: I0127 07:52:37.270953 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:37 crc kubenswrapper[4787]: I0127 07:52:37.270968 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:37 crc kubenswrapper[4787]: I0127 07:52:37.270993 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:37 crc kubenswrapper[4787]: I0127 07:52:37.271008 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:37Z","lastTransitionTime":"2026-01-27T07:52:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:52:37 crc kubenswrapper[4787]: I0127 07:52:37.375261 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:37 crc kubenswrapper[4787]: I0127 07:52:37.375319 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:37 crc kubenswrapper[4787]: I0127 07:52:37.375333 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:37 crc kubenswrapper[4787]: I0127 07:52:37.375354 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:37 crc kubenswrapper[4787]: I0127 07:52:37.375369 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:37Z","lastTransitionTime":"2026-01-27T07:52:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:37 crc kubenswrapper[4787]: I0127 07:52:37.479447 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:37 crc kubenswrapper[4787]: I0127 07:52:37.479503 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:37 crc kubenswrapper[4787]: I0127 07:52:37.479520 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:37 crc kubenswrapper[4787]: I0127 07:52:37.479545 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:37 crc kubenswrapper[4787]: I0127 07:52:37.479609 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:37Z","lastTransitionTime":"2026-01-27T07:52:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:37 crc kubenswrapper[4787]: I0127 07:52:37.583547 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:37 crc kubenswrapper[4787]: I0127 07:52:37.583655 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:37 crc kubenswrapper[4787]: I0127 07:52:37.583675 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:37 crc kubenswrapper[4787]: I0127 07:52:37.583705 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:37 crc kubenswrapper[4787]: I0127 07:52:37.583731 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:37Z","lastTransitionTime":"2026-01-27T07:52:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:52:37 crc kubenswrapper[4787]: I0127 07:52:37.687445 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:37 crc kubenswrapper[4787]: I0127 07:52:37.687512 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:37 crc kubenswrapper[4787]: I0127 07:52:37.687531 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:37 crc kubenswrapper[4787]: I0127 07:52:37.687579 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:37 crc kubenswrapper[4787]: I0127 07:52:37.687602 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:37Z","lastTransitionTime":"2026-01-27T07:52:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:37 crc kubenswrapper[4787]: I0127 07:52:37.790736 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:37 crc kubenswrapper[4787]: I0127 07:52:37.790814 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:37 crc kubenswrapper[4787]: I0127 07:52:37.790829 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:37 crc kubenswrapper[4787]: I0127 07:52:37.790977 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:37 crc kubenswrapper[4787]: I0127 07:52:37.791001 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:37Z","lastTransitionTime":"2026-01-27T07:52:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:37 crc kubenswrapper[4787]: I0127 07:52:37.894059 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:37 crc kubenswrapper[4787]: I0127 07:52:37.894130 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:37 crc kubenswrapper[4787]: I0127 07:52:37.894146 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:37 crc kubenswrapper[4787]: I0127 07:52:37.894172 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:37 crc kubenswrapper[4787]: I0127 07:52:37.894189 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:37Z","lastTransitionTime":"2026-01-27T07:52:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:52:37 crc kubenswrapper[4787]: I0127 07:52:37.997856 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:37 crc kubenswrapper[4787]: I0127 07:52:37.997948 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:37 crc kubenswrapper[4787]: I0127 07:52:37.997964 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:37 crc kubenswrapper[4787]: I0127 07:52:37.997987 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:37 crc kubenswrapper[4787]: I0127 07:52:37.998005 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:37Z","lastTransitionTime":"2026-01-27T07:52:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:38 crc kubenswrapper[4787]: I0127 07:52:38.067280 4787 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-14 00:28:17.075609339 +0000 UTC Jan 27 07:52:38 crc kubenswrapper[4787]: I0127 07:52:38.102484 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:38 crc kubenswrapper[4787]: I0127 07:52:38.102630 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:38 crc kubenswrapper[4787]: I0127 07:52:38.102658 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:38 crc kubenswrapper[4787]: I0127 07:52:38.102695 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:38 crc kubenswrapper[4787]: I0127 07:52:38.102720 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:38Z","lastTransitionTime":"2026-01-27T07:52:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:52:38 crc kubenswrapper[4787]: I0127 07:52:38.206833 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:38 crc kubenswrapper[4787]: I0127 07:52:38.206933 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:38 crc kubenswrapper[4787]: I0127 07:52:38.206959 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:38 crc kubenswrapper[4787]: I0127 07:52:38.206993 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:38 crc kubenswrapper[4787]: I0127 07:52:38.207019 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:38Z","lastTransitionTime":"2026-01-27T07:52:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:38 crc kubenswrapper[4787]: I0127 07:52:38.310425 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:38 crc kubenswrapper[4787]: I0127 07:52:38.310478 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:38 crc kubenswrapper[4787]: I0127 07:52:38.310495 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:38 crc kubenswrapper[4787]: I0127 07:52:38.310519 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:38 crc kubenswrapper[4787]: I0127 07:52:38.310537 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:38Z","lastTransitionTime":"2026-01-27T07:52:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:38 crc kubenswrapper[4787]: I0127 07:52:38.423399 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:38 crc kubenswrapper[4787]: I0127 07:52:38.423473 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:38 crc kubenswrapper[4787]: I0127 07:52:38.423493 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:38 crc kubenswrapper[4787]: I0127 07:52:38.423525 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:38 crc kubenswrapper[4787]: I0127 07:52:38.423575 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:38Z","lastTransitionTime":"2026-01-27T07:52:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:52:38 crc kubenswrapper[4787]: I0127 07:52:38.528037 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:38 crc kubenswrapper[4787]: I0127 07:52:38.528120 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:38 crc kubenswrapper[4787]: I0127 07:52:38.528146 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:38 crc kubenswrapper[4787]: I0127 07:52:38.528220 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:38 crc kubenswrapper[4787]: I0127 07:52:38.528245 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:38Z","lastTransitionTime":"2026-01-27T07:52:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:38 crc kubenswrapper[4787]: I0127 07:52:38.631030 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:38 crc kubenswrapper[4787]: I0127 07:52:38.631098 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:38 crc kubenswrapper[4787]: I0127 07:52:38.631116 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:38 crc kubenswrapper[4787]: I0127 07:52:38.631146 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:38 crc kubenswrapper[4787]: I0127 07:52:38.631168 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:38Z","lastTransitionTime":"2026-01-27T07:52:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:38 crc kubenswrapper[4787]: I0127 07:52:38.735101 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:38 crc kubenswrapper[4787]: I0127 07:52:38.735184 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:38 crc kubenswrapper[4787]: I0127 07:52:38.735202 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:38 crc kubenswrapper[4787]: I0127 07:52:38.735229 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:38 crc kubenswrapper[4787]: I0127 07:52:38.735248 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:38Z","lastTransitionTime":"2026-01-27T07:52:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:52:38 crc kubenswrapper[4787]: I0127 07:52:38.839159 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:38 crc kubenswrapper[4787]: I0127 07:52:38.839215 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:38 crc kubenswrapper[4787]: I0127 07:52:38.839224 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:38 crc kubenswrapper[4787]: I0127 07:52:38.839242 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:38 crc kubenswrapper[4787]: I0127 07:52:38.839252 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:38Z","lastTransitionTime":"2026-01-27T07:52:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:38 crc kubenswrapper[4787]: I0127 07:52:38.943298 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:38 crc kubenswrapper[4787]: I0127 07:52:38.943353 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:38 crc kubenswrapper[4787]: I0127 07:52:38.943368 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:38 crc kubenswrapper[4787]: I0127 07:52:38.943391 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:38 crc kubenswrapper[4787]: I0127 07:52:38.943405 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:38Z","lastTransitionTime":"2026-01-27T07:52:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:39 crc kubenswrapper[4787]: I0127 07:52:39.046620 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:39 crc kubenswrapper[4787]: I0127 07:52:39.046699 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:39 crc kubenswrapper[4787]: I0127 07:52:39.046715 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:39 crc kubenswrapper[4787]: I0127 07:52:39.046760 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:39 crc kubenswrapper[4787]: I0127 07:52:39.046776 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:39Z","lastTransitionTime":"2026-01-27T07:52:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:52:39 crc kubenswrapper[4787]: I0127 07:52:39.068416 4787 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-14 22:05:16.130982658 +0000 UTC Jan 27 07:52:39 crc kubenswrapper[4787]: I0127 07:52:39.076163 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 27 07:52:39 crc kubenswrapper[4787]: I0127 07:52:39.076228 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 27 07:52:39 crc kubenswrapper[4787]: I0127 07:52:39.076291 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 27 07:52:39 crc kubenswrapper[4787]: I0127 07:52:39.076234 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-vws75" Jan 27 07:52:39 crc kubenswrapper[4787]: E0127 07:52:39.076338 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 27 07:52:39 crc kubenswrapper[4787]: E0127 07:52:39.076547 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 27 07:52:39 crc kubenswrapper[4787]: E0127 07:52:39.076819 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-vws75" podUID="3969f21f-ab36-49b4-9a9c-02cf19e65ad0" Jan 27 07:52:39 crc kubenswrapper[4787]: E0127 07:52:39.076897 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 27 07:52:39 crc kubenswrapper[4787]: I0127 07:52:39.150230 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:39 crc kubenswrapper[4787]: I0127 07:52:39.150299 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:39 crc kubenswrapper[4787]: I0127 07:52:39.150317 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:39 crc kubenswrapper[4787]: I0127 07:52:39.150345 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:39 crc kubenswrapper[4787]: I0127 07:52:39.150361 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:39Z","lastTransitionTime":"2026-01-27T07:52:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:39 crc kubenswrapper[4787]: I0127 07:52:39.254784 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:39 crc kubenswrapper[4787]: I0127 07:52:39.254954 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:39 crc kubenswrapper[4787]: I0127 07:52:39.254973 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:39 crc kubenswrapper[4787]: I0127 07:52:39.254998 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:39 crc kubenswrapper[4787]: I0127 07:52:39.255070 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:39Z","lastTransitionTime":"2026-01-27T07:52:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:52:39 crc kubenswrapper[4787]: I0127 07:52:39.358717 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:39 crc kubenswrapper[4787]: I0127 07:52:39.358784 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:39 crc kubenswrapper[4787]: I0127 07:52:39.358802 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:39 crc kubenswrapper[4787]: I0127 07:52:39.358829 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:39 crc kubenswrapper[4787]: I0127 07:52:39.358848 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:39Z","lastTransitionTime":"2026-01-27T07:52:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:39 crc kubenswrapper[4787]: I0127 07:52:39.462001 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:39 crc kubenswrapper[4787]: I0127 07:52:39.462103 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:39 crc kubenswrapper[4787]: I0127 07:52:39.462125 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:39 crc kubenswrapper[4787]: I0127 07:52:39.462152 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:39 crc kubenswrapper[4787]: I0127 07:52:39.462174 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:39Z","lastTransitionTime":"2026-01-27T07:52:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:39 crc kubenswrapper[4787]: I0127 07:52:39.565936 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:39 crc kubenswrapper[4787]: I0127 07:52:39.566020 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:39 crc kubenswrapper[4787]: I0127 07:52:39.566031 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:39 crc kubenswrapper[4787]: I0127 07:52:39.566051 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:39 crc kubenswrapper[4787]: I0127 07:52:39.566065 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:39Z","lastTransitionTime":"2026-01-27T07:52:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:52:39 crc kubenswrapper[4787]: I0127 07:52:39.669259 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:39 crc kubenswrapper[4787]: I0127 07:52:39.669371 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:39 crc kubenswrapper[4787]: I0127 07:52:39.669391 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:39 crc kubenswrapper[4787]: I0127 07:52:39.669421 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:39 crc kubenswrapper[4787]: I0127 07:52:39.669446 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:39Z","lastTransitionTime":"2026-01-27T07:52:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:39 crc kubenswrapper[4787]: I0127 07:52:39.772644 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:39 crc kubenswrapper[4787]: I0127 07:52:39.772679 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:39 crc kubenswrapper[4787]: I0127 07:52:39.772688 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:39 crc kubenswrapper[4787]: I0127 07:52:39.772705 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:39 crc kubenswrapper[4787]: I0127 07:52:39.772716 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:39Z","lastTransitionTime":"2026-01-27T07:52:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:39 crc kubenswrapper[4787]: I0127 07:52:39.875795 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:39 crc kubenswrapper[4787]: I0127 07:52:39.875867 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:39 crc kubenswrapper[4787]: I0127 07:52:39.875881 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:39 crc kubenswrapper[4787]: I0127 07:52:39.875906 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:39 crc kubenswrapper[4787]: I0127 07:52:39.875922 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:39Z","lastTransitionTime":"2026-01-27T07:52:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:52:39 crc kubenswrapper[4787]: I0127 07:52:39.978643 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:39 crc kubenswrapper[4787]: I0127 07:52:39.978695 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:39 crc kubenswrapper[4787]: I0127 07:52:39.978705 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:39 crc kubenswrapper[4787]: I0127 07:52:39.978723 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:39 crc kubenswrapper[4787]: I0127 07:52:39.978733 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:39Z","lastTransitionTime":"2026-01-27T07:52:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:40 crc kubenswrapper[4787]: I0127 07:52:40.069687 4787 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-05 07:26:10.97669824 +0000 UTC Jan 27 07:52:40 crc kubenswrapper[4787]: I0127 07:52:40.081594 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:40 crc kubenswrapper[4787]: I0127 07:52:40.081649 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:40 crc kubenswrapper[4787]: I0127 07:52:40.081661 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:40 crc kubenswrapper[4787]: I0127 07:52:40.081682 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:40 crc kubenswrapper[4787]: I0127 07:52:40.081698 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:40Z","lastTransitionTime":"2026-01-27T07:52:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:52:40 crc kubenswrapper[4787]: I0127 07:52:40.184115 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:40 crc kubenswrapper[4787]: I0127 07:52:40.184181 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:40 crc kubenswrapper[4787]: I0127 07:52:40.184193 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:40 crc kubenswrapper[4787]: I0127 07:52:40.184216 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:40 crc kubenswrapper[4787]: I0127 07:52:40.184237 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:40Z","lastTransitionTime":"2026-01-27T07:52:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:40 crc kubenswrapper[4787]: I0127 07:52:40.286957 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:40 crc kubenswrapper[4787]: I0127 07:52:40.287004 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:40 crc kubenswrapper[4787]: I0127 07:52:40.287015 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:40 crc kubenswrapper[4787]: I0127 07:52:40.287034 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:40 crc kubenswrapper[4787]: I0127 07:52:40.287055 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:40Z","lastTransitionTime":"2026-01-27T07:52:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:40 crc kubenswrapper[4787]: I0127 07:52:40.390298 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:40 crc kubenswrapper[4787]: I0127 07:52:40.390351 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:40 crc kubenswrapper[4787]: I0127 07:52:40.390364 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:40 crc kubenswrapper[4787]: I0127 07:52:40.390384 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:40 crc kubenswrapper[4787]: I0127 07:52:40.390395 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:40Z","lastTransitionTime":"2026-01-27T07:52:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:52:40 crc kubenswrapper[4787]: I0127 07:52:40.494281 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:40 crc kubenswrapper[4787]: I0127 07:52:40.494332 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:40 crc kubenswrapper[4787]: I0127 07:52:40.494343 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:40 crc kubenswrapper[4787]: I0127 07:52:40.494360 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:40 crc kubenswrapper[4787]: I0127 07:52:40.494370 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:40Z","lastTransitionTime":"2026-01-27T07:52:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:40 crc kubenswrapper[4787]: I0127 07:52:40.597711 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:40 crc kubenswrapper[4787]: I0127 07:52:40.597775 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:40 crc kubenswrapper[4787]: I0127 07:52:40.597792 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:40 crc kubenswrapper[4787]: I0127 07:52:40.597820 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:40 crc kubenswrapper[4787]: I0127 07:52:40.597837 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:40Z","lastTransitionTime":"2026-01-27T07:52:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:40 crc kubenswrapper[4787]: I0127 07:52:40.700360 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:40 crc kubenswrapper[4787]: I0127 07:52:40.700404 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:40 crc kubenswrapper[4787]: I0127 07:52:40.700415 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:40 crc kubenswrapper[4787]: I0127 07:52:40.700431 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:40 crc kubenswrapper[4787]: I0127 07:52:40.700442 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:40Z","lastTransitionTime":"2026-01-27T07:52:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:52:40 crc kubenswrapper[4787]: I0127 07:52:40.803398 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:40 crc kubenswrapper[4787]: I0127 07:52:40.803453 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:40 crc kubenswrapper[4787]: I0127 07:52:40.803467 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:40 crc kubenswrapper[4787]: I0127 07:52:40.803482 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:40 crc kubenswrapper[4787]: I0127 07:52:40.803492 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:40Z","lastTransitionTime":"2026-01-27T07:52:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:40 crc kubenswrapper[4787]: I0127 07:52:40.907295 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:40 crc kubenswrapper[4787]: I0127 07:52:40.907343 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:40 crc kubenswrapper[4787]: I0127 07:52:40.907354 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:40 crc kubenswrapper[4787]: I0127 07:52:40.907371 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:40 crc kubenswrapper[4787]: I0127 07:52:40.907382 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:40Z","lastTransitionTime":"2026-01-27T07:52:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:41 crc kubenswrapper[4787]: I0127 07:52:41.009502 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:41 crc kubenswrapper[4787]: I0127 07:52:41.009568 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:41 crc kubenswrapper[4787]: I0127 07:52:41.009581 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:41 crc kubenswrapper[4787]: I0127 07:52:41.009599 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:41 crc kubenswrapper[4787]: I0127 07:52:41.009612 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:41Z","lastTransitionTime":"2026-01-27T07:52:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:52:41 crc kubenswrapper[4787]: I0127 07:52:41.070270 4787 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-04 10:07:44.395275114 +0000 UTC Jan 27 07:52:41 crc kubenswrapper[4787]: I0127 07:52:41.075512 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 27 07:52:41 crc kubenswrapper[4787]: I0127 07:52:41.075615 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-vws75" Jan 27 07:52:41 crc kubenswrapper[4787]: I0127 07:52:41.075626 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 27 07:52:41 crc kubenswrapper[4787]: E0127 07:52:41.075788 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-vws75" podUID="3969f21f-ab36-49b4-9a9c-02cf19e65ad0" Jan 27 07:52:41 crc kubenswrapper[4787]: I0127 07:52:41.075818 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 27 07:52:41 crc kubenswrapper[4787]: E0127 07:52:41.075871 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 27 07:52:41 crc kubenswrapper[4787]: E0127 07:52:41.075666 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 27 07:52:41 crc kubenswrapper[4787]: E0127 07:52:41.075945 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 27 07:52:41 crc kubenswrapper[4787]: I0127 07:52:41.112240 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:41 crc kubenswrapper[4787]: I0127 07:52:41.112284 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:41 crc kubenswrapper[4787]: I0127 07:52:41.112297 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:41 crc kubenswrapper[4787]: I0127 07:52:41.112313 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:41 crc kubenswrapper[4787]: I0127 07:52:41.112326 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:41Z","lastTransitionTime":"2026-01-27T07:52:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:41 crc kubenswrapper[4787]: I0127 07:52:41.215164 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:41 crc kubenswrapper[4787]: I0127 07:52:41.215213 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:41 crc kubenswrapper[4787]: I0127 07:52:41.215223 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:41 crc kubenswrapper[4787]: I0127 07:52:41.215240 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:41 crc kubenswrapper[4787]: I0127 07:52:41.215251 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:41Z","lastTransitionTime":"2026-01-27T07:52:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:52:41 crc kubenswrapper[4787]: I0127 07:52:41.317488 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:41 crc kubenswrapper[4787]: I0127 07:52:41.317528 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:41 crc kubenswrapper[4787]: I0127 07:52:41.317538 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:41 crc kubenswrapper[4787]: I0127 07:52:41.317583 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:41 crc kubenswrapper[4787]: I0127 07:52:41.317596 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:41Z","lastTransitionTime":"2026-01-27T07:52:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:41 crc kubenswrapper[4787]: I0127 07:52:41.419904 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:41 crc kubenswrapper[4787]: I0127 07:52:41.419951 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:41 crc kubenswrapper[4787]: I0127 07:52:41.419961 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:41 crc kubenswrapper[4787]: I0127 07:52:41.419979 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:41 crc kubenswrapper[4787]: I0127 07:52:41.419989 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:41Z","lastTransitionTime":"2026-01-27T07:52:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:41 crc kubenswrapper[4787]: I0127 07:52:41.522832 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:41 crc kubenswrapper[4787]: I0127 07:52:41.522885 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:41 crc kubenswrapper[4787]: I0127 07:52:41.522894 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:41 crc kubenswrapper[4787]: I0127 07:52:41.522911 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:41 crc kubenswrapper[4787]: I0127 07:52:41.522922 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:41Z","lastTransitionTime":"2026-01-27T07:52:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:52:41 crc kubenswrapper[4787]: I0127 07:52:41.626278 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:41 crc kubenswrapper[4787]: I0127 07:52:41.626331 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:41 crc kubenswrapper[4787]: I0127 07:52:41.626344 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:41 crc kubenswrapper[4787]: I0127 07:52:41.626363 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:41 crc kubenswrapper[4787]: I0127 07:52:41.626375 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:41Z","lastTransitionTime":"2026-01-27T07:52:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:41 crc kubenswrapper[4787]: I0127 07:52:41.729096 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:41 crc kubenswrapper[4787]: I0127 07:52:41.729179 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:41 crc kubenswrapper[4787]: I0127 07:52:41.729198 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:41 crc kubenswrapper[4787]: I0127 07:52:41.729218 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:41 crc kubenswrapper[4787]: I0127 07:52:41.729229 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:41Z","lastTransitionTime":"2026-01-27T07:52:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:41 crc kubenswrapper[4787]: I0127 07:52:41.833329 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:41 crc kubenswrapper[4787]: I0127 07:52:41.833378 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:41 crc kubenswrapper[4787]: I0127 07:52:41.833390 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:41 crc kubenswrapper[4787]: I0127 07:52:41.833408 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:41 crc kubenswrapper[4787]: I0127 07:52:41.833420 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:41Z","lastTransitionTime":"2026-01-27T07:52:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:52:41 crc kubenswrapper[4787]: I0127 07:52:41.936481 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:41 crc kubenswrapper[4787]: I0127 07:52:41.936528 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:41 crc kubenswrapper[4787]: I0127 07:52:41.936541 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:41 crc kubenswrapper[4787]: I0127 07:52:41.936582 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:41 crc kubenswrapper[4787]: I0127 07:52:41.936597 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:41Z","lastTransitionTime":"2026-01-27T07:52:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:42 crc kubenswrapper[4787]: I0127 07:52:42.039788 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:42 crc kubenswrapper[4787]: I0127 07:52:42.039832 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:42 crc kubenswrapper[4787]: I0127 07:52:42.039846 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:42 crc kubenswrapper[4787]: I0127 07:52:42.039863 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:42 crc kubenswrapper[4787]: I0127 07:52:42.039881 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:42Z","lastTransitionTime":"2026-01-27T07:52:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:52:42 crc kubenswrapper[4787]: I0127 07:52:42.071455 4787 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-29 17:53:26.735688072 +0000 UTC Jan 27 07:52:42 crc kubenswrapper[4787]: I0127 07:52:42.140157 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:42 crc kubenswrapper[4787]: I0127 07:52:42.140208 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:42 crc kubenswrapper[4787]: I0127 07:52:42.140218 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:42 crc kubenswrapper[4787]: I0127 07:52:42.140236 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:42 crc kubenswrapper[4787]: I0127 07:52:42.140249 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:42Z","lastTransitionTime":"2026-01-27T07:52:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:42 crc kubenswrapper[4787]: E0127 07:52:42.154793 4787 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T07:52:42Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:42Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T07:52:42Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:42Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T07:52:42Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:42Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T07:52:42Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:42Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"55ded10c-ad4c-443d-a571-c7a8f4a50b03\\\",\\\"systemUUID\\\":\\\"553a2493-323d-43ed-bfcf-44c5a8746c19\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:42Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:42 crc kubenswrapper[4787]: I0127 07:52:42.159163 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:42 crc kubenswrapper[4787]: I0127 07:52:42.159198 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 27 07:52:42 crc kubenswrapper[4787]: I0127 07:52:42.159208 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:42 crc kubenswrapper[4787]: I0127 07:52:42.159220 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:42 crc kubenswrapper[4787]: I0127 07:52:42.159230 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:42Z","lastTransitionTime":"2026-01-27T07:52:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:42 crc kubenswrapper[4787]: E0127 07:52:42.176194 4787 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T07:52:42Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:42Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T07:52:42Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:42Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T07:52:42Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:42Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T07:52:42Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:42Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"55ded10c-ad4c-443d-a571-c7a8f4a50b03\\\",\\\"systemUUID\\\":\\\"553a2493-323d-43ed-bfcf-44c5a8746c19\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:42Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:42 crc kubenswrapper[4787]: I0127 07:52:42.186625 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:42 crc kubenswrapper[4787]: I0127 07:52:42.186699 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 27 07:52:42 crc kubenswrapper[4787]: I0127 07:52:42.186712 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:42 crc kubenswrapper[4787]: I0127 07:52:42.186736 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:42 crc kubenswrapper[4787]: I0127 07:52:42.186750 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:42Z","lastTransitionTime":"2026-01-27T07:52:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:42 crc kubenswrapper[4787]: E0127 07:52:42.206895 4787 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T07:52:42Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:42Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T07:52:42Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:42Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T07:52:42Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:42Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T07:52:42Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:42Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"55ded10c-ad4c-443d-a571-c7a8f4a50b03\\\",\\\"systemUUID\\\":\\\"553a2493-323d-43ed-bfcf-44c5a8746c19\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:42Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:42 crc kubenswrapper[4787]: I0127 07:52:42.212222 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:42 crc kubenswrapper[4787]: I0127 07:52:42.212268 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 27 07:52:42 crc kubenswrapper[4787]: I0127 07:52:42.212279 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:42 crc kubenswrapper[4787]: I0127 07:52:42.212294 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:42 crc kubenswrapper[4787]: I0127 07:52:42.212304 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:42Z","lastTransitionTime":"2026-01-27T07:52:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:42 crc kubenswrapper[4787]: E0127 07:52:42.224455 4787 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T07:52:42Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:42Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T07:52:42Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:42Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T07:52:42Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:42Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T07:52:42Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:42Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"55ded10c-ad4c-443d-a571-c7a8f4a50b03\\\",\\\"systemUUID\\\":\\\"553a2493-323d-43ed-bfcf-44c5a8746c19\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:42Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:42 crc kubenswrapper[4787]: I0127 07:52:42.228704 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:42 crc kubenswrapper[4787]: I0127 07:52:42.228771 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 27 07:52:42 crc kubenswrapper[4787]: I0127 07:52:42.228787 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:42 crc kubenswrapper[4787]: I0127 07:52:42.228814 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:42 crc kubenswrapper[4787]: I0127 07:52:42.228829 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:42Z","lastTransitionTime":"2026-01-27T07:52:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:42 crc kubenswrapper[4787]: E0127 07:52:42.242106 4787 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T07:52:42Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:42Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T07:52:42Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:42Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T07:52:42Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:42Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T07:52:42Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:42Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"55ded10c-ad4c-443d-a571-c7a8f4a50b03\\\",\\\"systemUUID\\\":\\\"553a2493-323d-43ed-bfcf-44c5a8746c19\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:42Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:42 crc kubenswrapper[4787]: E0127 07:52:42.242359 4787 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Jan 27 07:52:42 crc kubenswrapper[4787]: I0127 07:52:42.245365 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Jan 27 07:52:42 crc kubenswrapper[4787]: I0127 07:52:42.245425 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:42 crc kubenswrapper[4787]: I0127 07:52:42.245438 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:42 crc kubenswrapper[4787]: I0127 07:52:42.245463 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:42 crc kubenswrapper[4787]: I0127 07:52:42.245491 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:42Z","lastTransitionTime":"2026-01-27T07:52:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:42 crc kubenswrapper[4787]: I0127 07:52:42.347581 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:42 crc kubenswrapper[4787]: I0127 07:52:42.347632 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:42 crc kubenswrapper[4787]: I0127 07:52:42.347643 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:42 crc kubenswrapper[4787]: I0127 07:52:42.347662 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:42 crc kubenswrapper[4787]: I0127 07:52:42.347678 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:42Z","lastTransitionTime":"2026-01-27T07:52:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:42 crc kubenswrapper[4787]: I0127 07:52:42.451148 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:42 crc kubenswrapper[4787]: I0127 07:52:42.451224 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:42 crc kubenswrapper[4787]: I0127 07:52:42.451233 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:42 crc kubenswrapper[4787]: I0127 07:52:42.451251 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:42 crc kubenswrapper[4787]: I0127 07:52:42.451263 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:42Z","lastTransitionTime":"2026-01-27T07:52:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:52:42 crc kubenswrapper[4787]: I0127 07:52:42.554239 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:42 crc kubenswrapper[4787]: I0127 07:52:42.554298 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:42 crc kubenswrapper[4787]: I0127 07:52:42.554312 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:42 crc kubenswrapper[4787]: I0127 07:52:42.554331 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:42 crc kubenswrapper[4787]: I0127 07:52:42.554343 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:42Z","lastTransitionTime":"2026-01-27T07:52:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:42 crc kubenswrapper[4787]: I0127 07:52:42.657632 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:42 crc kubenswrapper[4787]: I0127 07:52:42.657710 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:42 crc kubenswrapper[4787]: I0127 07:52:42.657729 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:42 crc kubenswrapper[4787]: I0127 07:52:42.657767 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:42 crc kubenswrapper[4787]: I0127 07:52:42.657787 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:42Z","lastTransitionTime":"2026-01-27T07:52:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:42 crc kubenswrapper[4787]: I0127 07:52:42.760107 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:42 crc kubenswrapper[4787]: I0127 07:52:42.760170 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:42 crc kubenswrapper[4787]: I0127 07:52:42.760186 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:42 crc kubenswrapper[4787]: I0127 07:52:42.760210 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:42 crc kubenswrapper[4787]: I0127 07:52:42.760226 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:42Z","lastTransitionTime":"2026-01-27T07:52:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:52:42 crc kubenswrapper[4787]: I0127 07:52:42.863138 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:42 crc kubenswrapper[4787]: I0127 07:52:42.863196 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:42 crc kubenswrapper[4787]: I0127 07:52:42.863206 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:42 crc kubenswrapper[4787]: I0127 07:52:42.863225 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:42 crc kubenswrapper[4787]: I0127 07:52:42.863238 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:42Z","lastTransitionTime":"2026-01-27T07:52:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:42 crc kubenswrapper[4787]: I0127 07:52:42.966148 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:42 crc kubenswrapper[4787]: I0127 07:52:42.966210 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:42 crc kubenswrapper[4787]: I0127 07:52:42.966231 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:42 crc kubenswrapper[4787]: I0127 07:52:42.966259 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:42 crc kubenswrapper[4787]: I0127 07:52:42.966281 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:42Z","lastTransitionTime":"2026-01-27T07:52:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:43 crc kubenswrapper[4787]: I0127 07:52:43.069122 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:43 crc kubenswrapper[4787]: I0127 07:52:43.069173 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:43 crc kubenswrapper[4787]: I0127 07:52:43.069187 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:43 crc kubenswrapper[4787]: I0127 07:52:43.069207 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:43 crc kubenswrapper[4787]: I0127 07:52:43.069224 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:43Z","lastTransitionTime":"2026-01-27T07:52:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:52:43 crc kubenswrapper[4787]: I0127 07:52:43.072281 4787 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-21 19:16:31.972185356 +0000 UTC Jan 27 07:52:43 crc kubenswrapper[4787]: I0127 07:52:43.075585 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 27 07:52:43 crc kubenswrapper[4787]: I0127 07:52:43.075644 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 27 07:52:43 crc kubenswrapper[4787]: I0127 07:52:43.075719 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 27 07:52:43 crc kubenswrapper[4787]: E0127 07:52:43.075828 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 27 07:52:43 crc kubenswrapper[4787]: I0127 07:52:43.075896 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-vws75" Jan 27 07:52:43 crc kubenswrapper[4787]: E0127 07:52:43.076126 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-vws75" podUID="3969f21f-ab36-49b4-9a9c-02cf19e65ad0" Jan 27 07:52:43 crc kubenswrapper[4787]: E0127 07:52:43.076112 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 27 07:52:43 crc kubenswrapper[4787]: E0127 07:52:43.076304 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 27 07:52:43 crc kubenswrapper[4787]: I0127 07:52:43.173327 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:43 crc kubenswrapper[4787]: I0127 07:52:43.173368 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:43 crc kubenswrapper[4787]: I0127 07:52:43.173381 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:43 crc kubenswrapper[4787]: I0127 07:52:43.173403 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:43 crc kubenswrapper[4787]: I0127 07:52:43.173423 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:43Z","lastTransitionTime":"2026-01-27T07:52:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:43 crc kubenswrapper[4787]: I0127 07:52:43.278066 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:43 crc kubenswrapper[4787]: I0127 07:52:43.278104 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:43 crc kubenswrapper[4787]: I0127 07:52:43.278116 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:43 crc kubenswrapper[4787]: I0127 07:52:43.278138 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:43 crc kubenswrapper[4787]: I0127 07:52:43.278152 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:43Z","lastTransitionTime":"2026-01-27T07:52:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:52:43 crc kubenswrapper[4787]: I0127 07:52:43.382973 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:43 crc kubenswrapper[4787]: I0127 07:52:43.383024 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:43 crc kubenswrapper[4787]: I0127 07:52:43.383036 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:43 crc kubenswrapper[4787]: I0127 07:52:43.383058 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:43 crc kubenswrapper[4787]: I0127 07:52:43.383070 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:43Z","lastTransitionTime":"2026-01-27T07:52:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:43 crc kubenswrapper[4787]: I0127 07:52:43.485843 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:43 crc kubenswrapper[4787]: I0127 07:52:43.485878 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:43 crc kubenswrapper[4787]: I0127 07:52:43.485889 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:43 crc kubenswrapper[4787]: I0127 07:52:43.485908 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:43 crc kubenswrapper[4787]: I0127 07:52:43.485923 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:43Z","lastTransitionTime":"2026-01-27T07:52:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:43 crc kubenswrapper[4787]: I0127 07:52:43.588155 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:43 crc kubenswrapper[4787]: I0127 07:52:43.588185 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:43 crc kubenswrapper[4787]: I0127 07:52:43.588197 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:43 crc kubenswrapper[4787]: I0127 07:52:43.588213 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:43 crc kubenswrapper[4787]: I0127 07:52:43.588226 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:43Z","lastTransitionTime":"2026-01-27T07:52:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:52:43 crc kubenswrapper[4787]: I0127 07:52:43.690988 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:43 crc kubenswrapper[4787]: I0127 07:52:43.691040 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:43 crc kubenswrapper[4787]: I0127 07:52:43.691052 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:43 crc kubenswrapper[4787]: I0127 07:52:43.691071 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:43 crc kubenswrapper[4787]: I0127 07:52:43.691084 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:43Z","lastTransitionTime":"2026-01-27T07:52:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:43 crc kubenswrapper[4787]: I0127 07:52:43.794229 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:43 crc kubenswrapper[4787]: I0127 07:52:43.794292 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:43 crc kubenswrapper[4787]: I0127 07:52:43.794309 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:43 crc kubenswrapper[4787]: I0127 07:52:43.794329 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:43 crc kubenswrapper[4787]: I0127 07:52:43.794347 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:43Z","lastTransitionTime":"2026-01-27T07:52:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:43 crc kubenswrapper[4787]: I0127 07:52:43.896450 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:43 crc kubenswrapper[4787]: I0127 07:52:43.896485 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:43 crc kubenswrapper[4787]: I0127 07:52:43.896495 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:43 crc kubenswrapper[4787]: I0127 07:52:43.896509 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:43 crc kubenswrapper[4787]: I0127 07:52:43.896521 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:43Z","lastTransitionTime":"2026-01-27T07:52:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:52:43 crc kubenswrapper[4787]: I0127 07:52:43.999167 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:43 crc kubenswrapper[4787]: I0127 07:52:43.999211 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:43 crc kubenswrapper[4787]: I0127 07:52:43.999221 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:43 crc kubenswrapper[4787]: I0127 07:52:43.999240 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:43 crc kubenswrapper[4787]: I0127 07:52:43.999251 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:43Z","lastTransitionTime":"2026-01-27T07:52:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:44 crc kubenswrapper[4787]: I0127 07:52:44.072928 4787 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-14 02:03:45.890651277 +0000 UTC Jan 27 07:52:44 crc kubenswrapper[4787]: I0127 07:52:44.102363 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:44 crc kubenswrapper[4787]: I0127 07:52:44.102424 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:44 crc kubenswrapper[4787]: I0127 07:52:44.102439 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:44 crc kubenswrapper[4787]: I0127 07:52:44.102459 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:44 crc kubenswrapper[4787]: I0127 07:52:44.102473 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:44Z","lastTransitionTime":"2026-01-27T07:52:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:52:44 crc kubenswrapper[4787]: I0127 07:52:44.204890 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:44 crc kubenswrapper[4787]: I0127 07:52:44.204950 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:44 crc kubenswrapper[4787]: I0127 07:52:44.204962 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:44 crc kubenswrapper[4787]: I0127 07:52:44.204984 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:44 crc kubenswrapper[4787]: I0127 07:52:44.204997 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:44Z","lastTransitionTime":"2026-01-27T07:52:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:44 crc kubenswrapper[4787]: I0127 07:52:44.307736 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:44 crc kubenswrapper[4787]: I0127 07:52:44.307768 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:44 crc kubenswrapper[4787]: I0127 07:52:44.307776 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:44 crc kubenswrapper[4787]: I0127 07:52:44.307789 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:44 crc kubenswrapper[4787]: I0127 07:52:44.307798 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:44Z","lastTransitionTime":"2026-01-27T07:52:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:44 crc kubenswrapper[4787]: I0127 07:52:44.411623 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:44 crc kubenswrapper[4787]: I0127 07:52:44.411688 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:44 crc kubenswrapper[4787]: I0127 07:52:44.411700 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:44 crc kubenswrapper[4787]: I0127 07:52:44.411726 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:44 crc kubenswrapper[4787]: I0127 07:52:44.411740 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:44Z","lastTransitionTime":"2026-01-27T07:52:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:52:44 crc kubenswrapper[4787]: I0127 07:52:44.515789 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:44 crc kubenswrapper[4787]: I0127 07:52:44.515867 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:44 crc kubenswrapper[4787]: I0127 07:52:44.515877 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:44 crc kubenswrapper[4787]: I0127 07:52:44.515899 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:44 crc kubenswrapper[4787]: I0127 07:52:44.515917 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:44Z","lastTransitionTime":"2026-01-27T07:52:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:44 crc kubenswrapper[4787]: I0127 07:52:44.618417 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:44 crc kubenswrapper[4787]: I0127 07:52:44.618473 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:44 crc kubenswrapper[4787]: I0127 07:52:44.618484 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:44 crc kubenswrapper[4787]: I0127 07:52:44.618506 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:44 crc kubenswrapper[4787]: I0127 07:52:44.618518 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:44Z","lastTransitionTime":"2026-01-27T07:52:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:44 crc kubenswrapper[4787]: I0127 07:52:44.721333 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:44 crc kubenswrapper[4787]: I0127 07:52:44.721382 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:44 crc kubenswrapper[4787]: I0127 07:52:44.721394 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:44 crc kubenswrapper[4787]: I0127 07:52:44.721416 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:44 crc kubenswrapper[4787]: I0127 07:52:44.721427 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:44Z","lastTransitionTime":"2026-01-27T07:52:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:52:44 crc kubenswrapper[4787]: I0127 07:52:44.824505 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:44 crc kubenswrapper[4787]: I0127 07:52:44.824595 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:44 crc kubenswrapper[4787]: I0127 07:52:44.824609 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:44 crc kubenswrapper[4787]: I0127 07:52:44.824628 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:44 crc kubenswrapper[4787]: I0127 07:52:44.824641 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:44Z","lastTransitionTime":"2026-01-27T07:52:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:44 crc kubenswrapper[4787]: I0127 07:52:44.927938 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:44 crc kubenswrapper[4787]: I0127 07:52:44.927997 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:44 crc kubenswrapper[4787]: I0127 07:52:44.928010 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:44 crc kubenswrapper[4787]: I0127 07:52:44.928032 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:44 crc kubenswrapper[4787]: I0127 07:52:44.928045 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:44Z","lastTransitionTime":"2026-01-27T07:52:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:45 crc kubenswrapper[4787]: I0127 07:52:45.031439 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:45 crc kubenswrapper[4787]: I0127 07:52:45.031505 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:45 crc kubenswrapper[4787]: I0127 07:52:45.031525 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:45 crc kubenswrapper[4787]: I0127 07:52:45.031581 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:45 crc kubenswrapper[4787]: I0127 07:52:45.031604 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:45Z","lastTransitionTime":"2026-01-27T07:52:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:52:45 crc kubenswrapper[4787]: I0127 07:52:45.074040 4787 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-17 11:33:11.026921117 +0000 UTC Jan 27 07:52:45 crc kubenswrapper[4787]: I0127 07:52:45.076289 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 27 07:52:45 crc kubenswrapper[4787]: I0127 07:52:45.076442 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 27 07:52:45 crc kubenswrapper[4787]: I0127 07:52:45.076455 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-vws75" Jan 27 07:52:45 crc kubenswrapper[4787]: I0127 07:52:45.076727 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 27 07:52:45 crc kubenswrapper[4787]: E0127 07:52:45.076745 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 27 07:52:45 crc kubenswrapper[4787]: E0127 07:52:45.076852 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 27 07:52:45 crc kubenswrapper[4787]: E0127 07:52:45.077236 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 27 07:52:45 crc kubenswrapper[4787]: I0127 07:52:45.077604 4787 scope.go:117] "RemoveContainer" containerID="adb98705453ca5a2191ab57c0255b907d1e1d505962fddc38dd440a1d56d505f" Jan 27 07:52:45 crc kubenswrapper[4787]: E0127 07:52:45.077635 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-vws75" podUID="3969f21f-ab36-49b4-9a9c-02cf19e65ad0" Jan 27 07:52:45 crc kubenswrapper[4787]: E0127 07:52:45.077871 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-7642m_openshift-ovn-kubernetes(fa44405c-042c-485a-ab6c-912dcd377751)\"" pod="openshift-ovn-kubernetes/ovnkube-node-7642m" podUID="fa44405c-042c-485a-ab6c-912dcd377751" Jan 27 07:52:45 crc kubenswrapper[4787]: I0127 07:52:45.144995 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:45 crc kubenswrapper[4787]: I0127 07:52:45.145051 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:45 crc kubenswrapper[4787]: I0127 07:52:45.145065 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:45 crc kubenswrapper[4787]: I0127 07:52:45.145088 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:45 crc kubenswrapper[4787]: I0127 07:52:45.145101 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:45Z","lastTransitionTime":"2026-01-27T07:52:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:45 crc kubenswrapper[4787]: I0127 07:52:45.148676 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7642m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fa44405c-042c-485a-ab6c-912dcd377751\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b5f54aa358ddf7d9fe76fd45c4e3332ba2ef1fa4da77c6f38ae29209c4339a80\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2abb29a77d0081b70495701e68f0a5a9b80656eab59ae8de10fab1450a1df49f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://108cdc389c2cbb9baeb538856e70ea1ca2bce5968eb4097700b3e71e5322258d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0bcc1a1d9f6e5bd0d8cf9b36441460debd839f92291531175eb05db5070136e8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fbc588f712cb0167029fc80f924f52841fb904738bbcf774c53ca15a2642ef9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc1a27dd4120a4eafd6bfbd908f9fdbc80e9b006afafb509399f2b657b129b2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://adb98705453ca5a2191ab57c0255b907d1e1d505
962fddc38dd440a1d56d505f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://adb98705453ca5a2191ab57c0255b907d1e1d505962fddc38dd440a1d56d505f\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-27T07:52:30Z\\\",\\\"message\\\":\\\" 6533 obj_retry.go:365] Adding new object: *v1.Pod openshift-network-node-identity/network-node-identity-vrzqb\\\\nF0127 07:52:30.089457 6533 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:30Z is after 2025-08-24T17:21:41Z]\\\\nI0127 07:52:30.089470 6533 obj_retry.go:303] Retry object setup: *v1.Pod openshift-image-registry/node-ca-rqwfc\\\\nI0127 07:52:30.089462 6533 ovn.go:134] Ensuring zone local for Pod openshift-network-node-identity/network-node-identity-vrzqb in node crc\\\\nI0127 07:52:30.089487 6533 obj_retry.go:386] Retry successful for *v1.Pod openshift-network-node-identity/network-node-identity-vrzqb after 0 fai\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-27T07:52:29Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-7642m_openshift-ovn-kubernetes(fa44405c-042c-485a-ab6c-912dcd377751)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d46b5c512f7b14d97a53ce0355a7ceb08370d3e91ccef003b40386a9785df413\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://199ad62dee1b7ba9f8af9b2f3fcd77db94e8ba7dbfe2d011610c47358c17126b\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://199ad62dee1b7ba9f8af9b2f3fcd77db94e8ba7dbfe2d011610c47358c17126b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:52:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:59Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-7642m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:45Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:45 crc kubenswrapper[4787]: I0127 07:52:45.163998 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-vhss5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"643aaef9-e302-436b-943e-940480ef74fc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5cda08199034af06aa5966ebd72a8a553fe0c83acf73bbd42f4f7d6bb121b2e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8q989
\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ea42ae75a943bc89609995381bc82366333131d7c7200a3ab758b9de239e3283\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8q989\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:52:12Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-vhss5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:45Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:45 crc kubenswrapper[4787]: I0127 07:52:45.174752 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-vws75" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3969f21f-ab36-49b4-9a9c-02cf19e65ad0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:13Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:13Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:13Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq2mg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq2mg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:52:13Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-vws75\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:45Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:45 crc kubenswrapper[4787]: I0127 07:52:45.187699 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6d28aae4-3359-4d7e-8862-8db4b17a3403\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d820bf9fe1c788b0bb17535226d96cd310fc188148091fa8b4186dd43baa5f75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d407da23fc5babf6cec063fb31861fd8ac0f456a44746a83740c61c1e53a436c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0509b185e88f56d56bc6f8429a620d74855d938f1206d6c9193d8e93fc12ae91\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec120e647d27b70e3066a00046d12258db3162ab9beef75b4079d546c2533aaf\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ec120e647d27b70e3066a00046d12258db3162ab9beef75b4079d546c2533aaf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:51:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:35Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:45Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:45 crc kubenswrapper[4787]: I0127 07:52:45.202875 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2974b5846d4b9e95a3eab849f160c5b33393ff6b1bf1275bc6476ec80807d632\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4426d84c4375c7c659c77049bbd6a720bf004fa5e7780b1e8739a2620c3667f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":t
rue,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:45Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:45 crc kubenswrapper[4787]: I0127 07:52:45.218230 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1d5a64cdfd5f8aff93294f92aebc9ccf8fa2a4063e347fce2e35604d27651d9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:45Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:45 crc kubenswrapper[4787]: I0127 07:52:45.233302 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-q4fh5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f051e184-acac-47cf-9e04-9df648288715\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cf36b99a3389bdbbca8142539870671f6be61df22503df198f6255937e619950\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zvg7g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8829867d627dfed47b988a93a9ac3e381c0bf59794b65f8e1c1e821838477748\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zvg7g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:59Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-q4fh5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:45Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:45 crc kubenswrapper[4787]: I0127 07:52:45.247667 4787 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:45 crc kubenswrapper[4787]: I0127 07:52:45.247690 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:45 crc kubenswrapper[4787]: I0127 07:52:45.247698 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:45 crc kubenswrapper[4787]: I0127 07:52:45.247713 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:45 crc kubenswrapper[4787]: I0127 07:52:45.247722 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:45Z","lastTransitionTime":"2026-01-27T07:52:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:45 crc kubenswrapper[4787]: I0127 07:52:45.248983 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9bef4a57-904d-47b1-baa1-224c5c9f2b1a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://99c0fb5499a4bed619d003da60f83076e413fc5bda47a12d42770f567deeeec8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c114c2274358e777b240765cca81ea54a2e334002317a86595b7bfec95a11fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\
\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c470d3edd878f69c48f1457780557dd56300c0fe69342c479f1922dca856bceb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bba5d04b33d62090867842211184f53bc23cf78b90a4c6709792639b74d1cf0d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:35Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:45Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:45 crc kubenswrapper[4787]: I0127 07:52:45.264633 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"60eef58e-b2eb-43d9-a499-317083a89ca3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://35e90c0899d9ae9ea7cce3f5904955f1f3a80e147efbd766e9ed3b34014f40df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://53dd18ffb0f39abccd8edc121977448f4c491f6be9acee866dde8cbeb7c52ab5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://af5452f3980a8a4614721cb60fffffb3b1f3f5d8d82928249857ed2dd3fb5986\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://215bbc991c576293e40d9ba239e697bfad702383af70483ddf0acea311b4ae28\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://879d855210b326ad3cbb964024c0b48b7373519deae8b974b9f5864416c15ef3\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"message\\\":\\\"le observer\\\\nW0127 07:51:54.984485 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0127 07:51:54.984680 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0127 07:51:54.985504 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2155430209/tls.crt::/tmp/serving-cert-2155430209/tls.key\\\\\\\"\\\\nI0127 07:51:55.207010 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0127 07:51:55.214027 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0127 07:51:55.214056 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0127 07:51:55.214085 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0127 07:51:55.214092 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0127 07:51:55.221404 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0127 07:51:55.221608 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0127 07:51:55.221697 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0127 07:51:55.221768 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0127 07:51:55.221825 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0127 07:51:55.221877 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0127 07:51:55.221926 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0127 07:51:55.221414 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0127 07:51:55.222961 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-27T07:51:49Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f3424e1f77a68fac6a8ccd12e018ce28850817c99e72cef7edbff0941124c46b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c37afd8d2562157729aca3685d7abae2bb6b9e081c2b456706dde875fd762c07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c37afd8d2562157729aca3685d7abae2bb6b9e081c2b456706dde875fd762c07\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:51:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:35Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:45Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:45 crc kubenswrapper[4787]: I0127 07:52:45.278316 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:45Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:45 crc kubenswrapper[4787]: I0127 07:52:45.289400 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-fghc7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6cac1bb0-6b6f-442f-9db4-a25d4f194bb1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://825d098d784c9e43aa1c5b9014dd290f1a23ddf0651dfd603c634682a8f05da2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c8zvj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:59Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-fghc7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:45Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:45 crc kubenswrapper[4787]: I0127 07:52:45.298955 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-rqwfc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fe88a860-6bb6-40ef-8ed7-c16f06fad8d3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f0dc7c2423b05f4eb857a5a6d8a5006cea055965d20a85e84659b369f9659eeb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-scmbf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:52:02Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-rqwfc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:45Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:45 crc kubenswrapper[4787]: I0127 07:52:45.314946 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-fqb6r" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0ff88955-7cd9-4af4-9e0e-79614a1d2994\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://47c65fabdecb2c24a7eeabdff7a89339f711ff64e9cb6516900be57a2753f89b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5e7833b50fe96981aa0ed63aa03b7078def1c809e60092a901a8b7ebaee78ad8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5e7833b50fe96981aa0ed63aa03b7078def1c809e60092a901a8b7ebaee78ad8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:52:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a0b5a15ba402c52fc6f3cd5d882f597c90ea8d1ab4e836f0e0998a301126f5a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a0b5a15ba402c52fc6f3cd5d882f597c90ea8d1ab4e836f0e0998a301126f5a6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:52:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:52:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://82bb64425ded7611030cccfa7a439b14e545bf83bc48c762df144eddc7d08487\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://82bb64425ded7611030cccfa7a439b14e545bf83bc48c762df144eddc7d08487\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:52:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:52:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9782b2a339d2dba1498f28ed3f7e6c4c2e747a282465311592f0fdb5863f823\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f9782b2a339d2dba1498f28ed3f7e6c4c2e747a282465311592f0fdb5863f823\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:52:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:52:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c8924a3ef645447151d91c506d105c9760f0dce0c568e9bf37ea76108a6352b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c8924a3ef645447151d91c506d105c9760f0dce0c568e9bf37ea76108a6352b2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:52:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f0da2ec606517cc9e0c2ed1d549c947db2eecb603f9bc9ba8bddbee0aad4710\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0f0da2ec606517cc9e0c2ed1d549c947db2eecb603f9bc9ba8bddbee0aad4710\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:52:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:52:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-fqb6r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:45Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:45 crc kubenswrapper[4787]: I0127 07:52:45.337317 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7279787f-4f29-4eae-a213-b7ea5d579b93\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://486c1b541ff6adc6a3cdab348fbdd7fbcf1c202c1a474e5e5db37bbcfb38fe06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://47800c35810b19642ee37e41f20900bef93d843d1d7e2219547dde7bf88cc7dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://85684c09dd546fc9a972aa45da092b22d794a5588140a451becfbe962be82b76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0aa5c186d3ea5f41540e7450b9ee5a5bbfe8359
762ab9aa219a4e4bb26fb0a94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9717409dfdeb0d26d9a1c28a651d1feddfd4aaa11a9bc7db7206fbf8c6400c22\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b0fb183cca1d2f6f6f5bd9c80107e85fe0f5f39ea985a036ba115836e45d7cc2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b0fb183cca1d2f6f6f5bd9c80107e85fe0f5f39ea985a036ba115836e45d7cc2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:51:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a9bb746c11b2a67e014815e16d6765a1c86a3d2c156cae83853881bdb988507c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a9bb746c11b2a67e014815e16d6765a1c86a3d2c156cae83853881bdb988507c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://1fa78137ccc23e32f6eb838abf3991ca3d99b8349721f1419c892a7f8795f55f\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1fa78137ccc23e32f6eb838abf3991ca3d99b8349721f1419c892a7f8795f55f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:51:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:35Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:45Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:45 crc kubenswrapper[4787]: I0127 07:52:45.351136 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:45 crc kubenswrapper[4787]: I0127 07:52:45.351215 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:45 crc kubenswrapper[4787]: I0127 07:52:45.351241 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:45 crc kubenswrapper[4787]: I0127 07:52:45.351270 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:45 crc kubenswrapper[4787]: I0127 07:52:45.351290 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:45Z","lastTransitionTime":"2026-01-27T07:52:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:52:45 crc kubenswrapper[4787]: I0127 07:52:45.356515 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7a18f3e6327858658093c392b8b7de18ec6a1085c08ae31ae2f2dc371555011a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:45Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:45 crc kubenswrapper[4787]: I0127 07:52:45.373000 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:45Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:45 crc kubenswrapper[4787]: I0127 07:52:45.386837 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:45Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:45 crc kubenswrapper[4787]: I0127 07:52:45.400987 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-rqjpz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e6f78168-0b0d-464d-b1c7-00bb9a69c0d1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e73805a73ffd2c2028fa682daaf20e295ecd2f2d7e24bb9b897883f18c3c514e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mo
untPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tz7b2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-rqjpz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:45Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:45 crc kubenswrapper[4787]: I0127 07:52:45.453507 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:45 crc kubenswrapper[4787]: I0127 07:52:45.453584 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:45 crc kubenswrapper[4787]: I0127 07:52:45.453596 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:45 crc kubenswrapper[4787]: I0127 07:52:45.453610 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:45 crc kubenswrapper[4787]: I0127 07:52:45.453618 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:45Z","lastTransitionTime":"2026-01-27T07:52:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:52:45 crc kubenswrapper[4787]: I0127 07:52:45.556824 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:45 crc kubenswrapper[4787]: I0127 07:52:45.556888 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:45 crc kubenswrapper[4787]: I0127 07:52:45.556900 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:45 crc kubenswrapper[4787]: I0127 07:52:45.556917 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:45 crc kubenswrapper[4787]: I0127 07:52:45.556928 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:45Z","lastTransitionTime":"2026-01-27T07:52:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:45 crc kubenswrapper[4787]: I0127 07:52:45.584291 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/3969f21f-ab36-49b4-9a9c-02cf19e65ad0-metrics-certs\") pod \"network-metrics-daemon-vws75\" (UID: \"3969f21f-ab36-49b4-9a9c-02cf19e65ad0\") " pod="openshift-multus/network-metrics-daemon-vws75" Jan 27 07:52:45 crc kubenswrapper[4787]: E0127 07:52:45.584482 4787 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Jan 27 07:52:45 crc kubenswrapper[4787]: E0127 07:52:45.584576 4787 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/3969f21f-ab36-49b4-9a9c-02cf19e65ad0-metrics-certs podName:3969f21f-ab36-49b4-9a9c-02cf19e65ad0 nodeName:}" failed. No retries permitted until 2026-01-27 07:53:17.584529838 +0000 UTC m=+103.236885330 (durationBeforeRetry 32s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/3969f21f-ab36-49b4-9a9c-02cf19e65ad0-metrics-certs") pod "network-metrics-daemon-vws75" (UID: "3969f21f-ab36-49b4-9a9c-02cf19e65ad0") : object "openshift-multus"/"metrics-daemon-secret" not registered Jan 27 07:52:45 crc kubenswrapper[4787]: I0127 07:52:45.660085 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:45 crc kubenswrapper[4787]: I0127 07:52:45.660152 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:45 crc kubenswrapper[4787]: I0127 07:52:45.660172 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:45 crc kubenswrapper[4787]: I0127 07:52:45.660198 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:45 crc kubenswrapper[4787]: I0127 07:52:45.660218 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:45Z","lastTransitionTime":"2026-01-27T07:52:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:45 crc kubenswrapper[4787]: I0127 07:52:45.762609 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:45 crc kubenswrapper[4787]: I0127 07:52:45.762724 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:45 crc kubenswrapper[4787]: I0127 07:52:45.762743 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:45 crc kubenswrapper[4787]: I0127 07:52:45.762764 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:45 crc kubenswrapper[4787]: I0127 07:52:45.762778 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:45Z","lastTransitionTime":"2026-01-27T07:52:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:52:45 crc kubenswrapper[4787]: I0127 07:52:45.865467 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:45 crc kubenswrapper[4787]: I0127 07:52:45.865520 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:45 crc kubenswrapper[4787]: I0127 07:52:45.865531 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:45 crc kubenswrapper[4787]: I0127 07:52:45.865570 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:45 crc kubenswrapper[4787]: I0127 07:52:45.865583 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:45Z","lastTransitionTime":"2026-01-27T07:52:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:45 crc kubenswrapper[4787]: I0127 07:52:45.968123 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:45 crc kubenswrapper[4787]: I0127 07:52:45.968168 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:45 crc kubenswrapper[4787]: I0127 07:52:45.968180 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:45 crc kubenswrapper[4787]: I0127 07:52:45.968197 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:45 crc kubenswrapper[4787]: I0127 07:52:45.968207 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:45Z","lastTransitionTime":"2026-01-27T07:52:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:46 crc kubenswrapper[4787]: I0127 07:52:46.072053 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:46 crc kubenswrapper[4787]: I0127 07:52:46.072106 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:46 crc kubenswrapper[4787]: I0127 07:52:46.072118 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:46 crc kubenswrapper[4787]: I0127 07:52:46.072138 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:46 crc kubenswrapper[4787]: I0127 07:52:46.072149 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:46Z","lastTransitionTime":"2026-01-27T07:52:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:52:46 crc kubenswrapper[4787]: I0127 07:52:46.074537 4787 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-09 11:37:01.240537854 +0000 UTC Jan 27 07:52:46 crc kubenswrapper[4787]: I0127 07:52:46.175657 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:46 crc kubenswrapper[4787]: I0127 07:52:46.175705 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:46 crc kubenswrapper[4787]: I0127 07:52:46.175717 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:46 crc kubenswrapper[4787]: I0127 07:52:46.175736 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:46 crc kubenswrapper[4787]: I0127 07:52:46.175748 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:46Z","lastTransitionTime":"2026-01-27T07:52:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:46 crc kubenswrapper[4787]: I0127 07:52:46.277907 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:46 crc kubenswrapper[4787]: I0127 07:52:46.277949 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:46 crc kubenswrapper[4787]: I0127 07:52:46.277957 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:46 crc kubenswrapper[4787]: I0127 07:52:46.277973 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:46 crc kubenswrapper[4787]: I0127 07:52:46.277982 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:46Z","lastTransitionTime":"2026-01-27T07:52:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:52:46 crc kubenswrapper[4787]: I0127 07:52:46.381698 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:46 crc kubenswrapper[4787]: I0127 07:52:46.381741 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:46 crc kubenswrapper[4787]: I0127 07:52:46.381754 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:46 crc kubenswrapper[4787]: I0127 07:52:46.381775 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:46 crc kubenswrapper[4787]: I0127 07:52:46.381788 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:46Z","lastTransitionTime":"2026-01-27T07:52:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:46 crc kubenswrapper[4787]: I0127 07:52:46.483953 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:46 crc kubenswrapper[4787]: I0127 07:52:46.483994 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:46 crc kubenswrapper[4787]: I0127 07:52:46.484003 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:46 crc kubenswrapper[4787]: I0127 07:52:46.484019 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:46 crc kubenswrapper[4787]: I0127 07:52:46.484030 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:46Z","lastTransitionTime":"2026-01-27T07:52:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:46 crc kubenswrapper[4787]: I0127 07:52:46.586004 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:46 crc kubenswrapper[4787]: I0127 07:52:46.586036 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:46 crc kubenswrapper[4787]: I0127 07:52:46.586046 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:46 crc kubenswrapper[4787]: I0127 07:52:46.586059 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:46 crc kubenswrapper[4787]: I0127 07:52:46.586070 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:46Z","lastTransitionTime":"2026-01-27T07:52:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:52:46 crc kubenswrapper[4787]: I0127 07:52:46.689046 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:46 crc kubenswrapper[4787]: I0127 07:52:46.689143 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:46 crc kubenswrapper[4787]: I0127 07:52:46.689171 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:46 crc kubenswrapper[4787]: I0127 07:52:46.689208 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:46 crc kubenswrapper[4787]: I0127 07:52:46.689241 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:46Z","lastTransitionTime":"2026-01-27T07:52:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:46 crc kubenswrapper[4787]: I0127 07:52:46.792185 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:46 crc kubenswrapper[4787]: I0127 07:52:46.792244 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:46 crc kubenswrapper[4787]: I0127 07:52:46.792265 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:46 crc kubenswrapper[4787]: I0127 07:52:46.792293 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:46 crc kubenswrapper[4787]: I0127 07:52:46.792313 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:46Z","lastTransitionTime":"2026-01-27T07:52:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:46 crc kubenswrapper[4787]: I0127 07:52:46.896314 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:46 crc kubenswrapper[4787]: I0127 07:52:46.896399 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:46 crc kubenswrapper[4787]: I0127 07:52:46.896418 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:46 crc kubenswrapper[4787]: I0127 07:52:46.896461 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:46 crc kubenswrapper[4787]: I0127 07:52:46.896478 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:46Z","lastTransitionTime":"2026-01-27T07:52:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:52:47 crc kubenswrapper[4787]: I0127 07:52:47.000023 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:47 crc kubenswrapper[4787]: I0127 07:52:47.000076 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:47 crc kubenswrapper[4787]: I0127 07:52:47.000089 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:47 crc kubenswrapper[4787]: I0127 07:52:47.000107 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:47 crc kubenswrapper[4787]: I0127 07:52:47.000121 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:47Z","lastTransitionTime":"2026-01-27T07:52:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:47 crc kubenswrapper[4787]: I0127 07:52:47.075124 4787 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-09 23:15:50.691439916 +0000 UTC Jan 27 07:52:47 crc kubenswrapper[4787]: I0127 07:52:47.076475 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-vws75" Jan 27 07:52:47 crc kubenswrapper[4787]: I0127 07:52:47.076530 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 27 07:52:47 crc kubenswrapper[4787]: I0127 07:52:47.076581 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 27 07:52:47 crc kubenswrapper[4787]: I0127 07:52:47.076539 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 27 07:52:47 crc kubenswrapper[4787]: E0127 07:52:47.076664 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-vws75" podUID="3969f21f-ab36-49b4-9a9c-02cf19e65ad0" Jan 27 07:52:47 crc kubenswrapper[4787]: E0127 07:52:47.076772 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 27 07:52:47 crc kubenswrapper[4787]: E0127 07:52:47.076827 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 27 07:52:47 crc kubenswrapper[4787]: E0127 07:52:47.076862 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 27 07:52:47 crc kubenswrapper[4787]: I0127 07:52:47.102595 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:47 crc kubenswrapper[4787]: I0127 07:52:47.102664 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:47 crc kubenswrapper[4787]: I0127 07:52:47.102685 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:47 crc kubenswrapper[4787]: I0127 07:52:47.102711 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:47 crc kubenswrapper[4787]: I0127 07:52:47.102732 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:47Z","lastTransitionTime":"2026-01-27T07:52:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:47 crc kubenswrapper[4787]: I0127 07:52:47.207151 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:47 crc kubenswrapper[4787]: I0127 07:52:47.207205 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:47 crc kubenswrapper[4787]: I0127 07:52:47.207227 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:47 crc kubenswrapper[4787]: I0127 07:52:47.207253 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:47 crc kubenswrapper[4787]: I0127 07:52:47.207272 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:47Z","lastTransitionTime":"2026-01-27T07:52:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:52:47 crc kubenswrapper[4787]: I0127 07:52:47.310341 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:47 crc kubenswrapper[4787]: I0127 07:52:47.310389 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:47 crc kubenswrapper[4787]: I0127 07:52:47.310399 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:47 crc kubenswrapper[4787]: I0127 07:52:47.310417 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:47 crc kubenswrapper[4787]: I0127 07:52:47.310427 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:47Z","lastTransitionTime":"2026-01-27T07:52:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:47 crc kubenswrapper[4787]: I0127 07:52:47.413475 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:47 crc kubenswrapper[4787]: I0127 07:52:47.413521 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:47 crc kubenswrapper[4787]: I0127 07:52:47.413532 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:47 crc kubenswrapper[4787]: I0127 07:52:47.413565 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:47 crc kubenswrapper[4787]: I0127 07:52:47.413582 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:47Z","lastTransitionTime":"2026-01-27T07:52:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:47 crc kubenswrapper[4787]: I0127 07:52:47.516316 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:47 crc kubenswrapper[4787]: I0127 07:52:47.516366 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:47 crc kubenswrapper[4787]: I0127 07:52:47.516382 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:47 crc kubenswrapper[4787]: I0127 07:52:47.516400 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:47 crc kubenswrapper[4787]: I0127 07:52:47.516410 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:47Z","lastTransitionTime":"2026-01-27T07:52:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:52:47 crc kubenswrapper[4787]: I0127 07:52:47.587962 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-rqjpz_e6f78168-0b0d-464d-b1c7-00bb9a69c0d1/kube-multus/0.log" Jan 27 07:52:47 crc kubenswrapper[4787]: I0127 07:52:47.588019 4787 generic.go:334] "Generic (PLEG): container finished" podID="e6f78168-0b0d-464d-b1c7-00bb9a69c0d1" containerID="e73805a73ffd2c2028fa682daaf20e295ecd2f2d7e24bb9b897883f18c3c514e" exitCode=1 Jan 27 07:52:47 crc kubenswrapper[4787]: I0127 07:52:47.588048 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-rqjpz" event={"ID":"e6f78168-0b0d-464d-b1c7-00bb9a69c0d1","Type":"ContainerDied","Data":"e73805a73ffd2c2028fa682daaf20e295ecd2f2d7e24bb9b897883f18c3c514e"} Jan 27 07:52:47 crc kubenswrapper[4787]: I0127 07:52:47.588410 4787 scope.go:117] "RemoveContainer" containerID="e73805a73ffd2c2028fa682daaf20e295ecd2f2d7e24bb9b897883f18c3c514e" Jan 27 07:52:47 crc kubenswrapper[4787]: I0127 07:52:47.606840 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7642m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fa44405c-042c-485a-ab6c-912dcd377751\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b5f54aa358ddf7d9fe76fd45c4e3332ba2ef1fa4da77c6f38ae29209c4339a80\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2abb29a77d0081b70495701e68f0a5a9b80656eab59ae8de10fab1450a1df49f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://108cdc389c2cbb9baeb538856e70ea1ca2bce5968eb4097700b3e71e5322258d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0bcc1a1d9f6e5bd0d8cf9b36441460debd839f92291531175eb05db5070136e8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fbc588f712cb0167029fc80f924f52841fb904738bbcf774c53ca15a2642ef9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc1a27dd4120a4eafd6bfbd908f9fdbc80e9b006afafb509399f2b657b129b2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://adb98705453ca5a2191ab57c0255b907d1e1d505
962fddc38dd440a1d56d505f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://adb98705453ca5a2191ab57c0255b907d1e1d505962fddc38dd440a1d56d505f\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-27T07:52:30Z\\\",\\\"message\\\":\\\" 6533 obj_retry.go:365] Adding new object: *v1.Pod openshift-network-node-identity/network-node-identity-vrzqb\\\\nF0127 07:52:30.089457 6533 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:30Z is after 2025-08-24T17:21:41Z]\\\\nI0127 07:52:30.089470 6533 obj_retry.go:303] Retry object setup: *v1.Pod openshift-image-registry/node-ca-rqwfc\\\\nI0127 07:52:30.089462 6533 ovn.go:134] Ensuring zone local for Pod openshift-network-node-identity/network-node-identity-vrzqb in node crc\\\\nI0127 07:52:30.089487 6533 obj_retry.go:386] Retry successful for *v1.Pod openshift-network-node-identity/network-node-identity-vrzqb after 0 fai\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-27T07:52:29Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-7642m_openshift-ovn-kubernetes(fa44405c-042c-485a-ab6c-912dcd377751)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d46b5c512f7b14d97a53ce0355a7ceb08370d3e91ccef003b40386a9785df413\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://199ad62dee1b7ba9f8af9b2f3fcd77db94e8ba7dbfe2d011610c47358c17126b\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://199ad62dee1b7ba9f8af9b2f3fcd77db94e8ba7dbfe2d011610c47358c17126b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:52:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:59Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-7642m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:47Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:47 crc kubenswrapper[4787]: I0127 07:52:47.620359 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:47 crc kubenswrapper[4787]: I0127 07:52:47.620146 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-vhss5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"643aaef9-e302-436b-943e-940480ef74fc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5cda08199034af06aa5966ebd72a8a553fe0c83acf73bbd42f4f7d6bb121b2e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8q989\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ea42ae75a943bc89609995381bc82366333131d7c7200a3ab758b9de239e3283\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8q989\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:52:12Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-vhss5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:47Z is after 2025-08-24T17:21:41Z" Jan 27 
07:52:47 crc kubenswrapper[4787]: I0127 07:52:47.620440 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:47 crc kubenswrapper[4787]: I0127 07:52:47.620456 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:47 crc kubenswrapper[4787]: I0127 07:52:47.620479 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:47 crc kubenswrapper[4787]: I0127 07:52:47.620500 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:47Z","lastTransitionTime":"2026-01-27T07:52:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:47 crc kubenswrapper[4787]: I0127 07:52:47.632834 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-vws75" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3969f21f-ab36-49b4-9a9c-02cf19e65ad0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:13Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:13Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:13Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq2mg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq2mg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:52:13Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-vws75\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:47Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:47 crc kubenswrapper[4787]: I0127 07:52:47.644440 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6d28aae4-3359-4d7e-8862-8db4b17a3403\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d820bf9fe1c788b0bb17535226d96cd310fc188148091fa8b4186dd43baa5f75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d407da23fc5babf6cec063fb31861fd8ac0f456a44746a83740c61c1e53a436c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0509b185e88f56d56bc6f8429a620d74855d938f1206d6c9193d8e93fc12ae91\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec120e647d27b70e3066a00046d12258db3162ab9beef75b4079d546c2533aaf\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ec120e647d27b70e3066a00046d12258db3162ab9beef75b4079d546c2533aaf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:51:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:35Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:47Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:47 crc kubenswrapper[4787]: I0127 07:52:47.661700 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2974b5846d4b9e95a3eab849f160c5b33393ff6b1bf1275bc6476ec80807d632\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4426d84c4375c7c659c77049bbd6a720bf004fa5e7780b1e8739a2620c3667f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":t
rue,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:47Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:47 crc kubenswrapper[4787]: I0127 07:52:47.677170 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1d5a64cdfd5f8aff93294f92aebc9ccf8fa2a4063e347fce2e35604d27651d9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:47Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:47 crc kubenswrapper[4787]: I0127 07:52:47.696188 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-q4fh5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f051e184-acac-47cf-9e04-9df648288715\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cf36b99a3389bdbbca8142539870671f6be61df22503df198f6255937e619950\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zvg7g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8829867d627dfed47b988a93a9ac3e381c0bf59794b65f8e1c1e821838477748\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zvg7g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:59Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-q4fh5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:47Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:47 crc kubenswrapper[4787]: I0127 07:52:47.711905 4787 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9bef4a57-904d-47b1-baa1-224c5c9f2b1a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://99c0fb5499a4bed619d003da60f83076e413fc5bda47a12d42770f567deeeec8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c114c2274358e777b240765cca81ea54a2e334002317a86595b7bfec95a11fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c470d3edd878f69c48f1457780557dd56300c0fe69342c479f1922dca856bceb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bba5d04b33d6209086784221118
4f53bc23cf78b90a4c6709792639b74d1cf0d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:35Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:47Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:47 crc kubenswrapper[4787]: I0127 07:52:47.723831 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:47 crc kubenswrapper[4787]: I0127 07:52:47.723861 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:47 crc kubenswrapper[4787]: I0127 07:52:47.723888 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:47 crc kubenswrapper[4787]: I0127 07:52:47.723903 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:47 crc kubenswrapper[4787]: I0127 07:52:47.723912 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:47Z","lastTransitionTime":"2026-01-27T07:52:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:52:47 crc kubenswrapper[4787]: I0127 07:52:47.727890 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"60eef58e-b2eb-43d9-a499-317083a89ca3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://35e90c0899d9ae9ea7cce3f5904955f1f3a80e147efbd766e9ed3b34014f40df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://53dd18ffb0f39abccd8edc121977448f4c491f6be9acee866dde8cbeb7c52ab5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://af5452f3980a8a4614721cb60fffffb3b1f3f5d8d82928249857ed2dd3fb5986\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/ku
bernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://215bbc991c576293e40d9ba239e697bfad702383af70483ddf0acea311b4ae28\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://879d855210b326ad3cbb964024c0b48b7373519deae8b974b9f5864416c15ef3\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"message\\\":\\\"le observer\\\\nW0127 07:51:54.984485 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0127 07:51:54.984680 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0127 07:51:54.985504 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2155430209/tls.crt::/tmp/serving-cert-2155430209/tls.key\\\\\\\"\\\\nI0127 07:51:55.207010 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0127 07:51:55.214027 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0127 07:51:55.214056 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0127 07:51:55.214085 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0127 07:51:55.214092 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0127 07:51:55.221404 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0127 07:51:55.221608 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0127 07:51:55.221697 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0127 07:51:55.221768 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0127 07:51:55.221825 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0127 07:51:55.221877 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0127 07:51:55.221926 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0127 07:51:55.221414 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0127 07:51:55.222961 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-27T07:51:49Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f3424e1f77a68fac6a8ccd12e018ce28850817c99e72cef7edbff0941124c46b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c37afd8d2562157729aca3685d7abae2bb6b9e081c2b456706dde875fd762c07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c37afd8d2562157729aca3685d7abae2bb6b9e081c2b456706dde875fd762c07\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:51:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:35Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:47Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:47 crc kubenswrapper[4787]: I0127 07:52:47.741172 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:47Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:47 crc kubenswrapper[4787]: I0127 07:52:47.754105 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-fghc7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6cac1bb0-6b6f-442f-9db4-a25d4f194bb1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://825d098d784c9e43aa1c5b9014dd290f1a23ddf0651dfd603c634682a8f05da2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c8zvj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:59Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-fghc7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:47Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:47 crc kubenswrapper[4787]: I0127 07:52:47.766399 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-rqwfc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fe88a860-6bb6-40ef-8ed7-c16f06fad8d3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f0dc7c2423b05f4eb857a5a6d8a5006cea055965d20a85e84659b369f9659eeb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-scmbf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:52:02Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-rqwfc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:47Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:47 crc kubenswrapper[4787]: I0127 07:52:47.795069 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7279787f-4f29-4eae-a213-b7ea5d579b93\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://486c1b541ff6adc6a3cdab348fbdd7fbcf1c202c1a474e5e5db37bbcfb38fe06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://47800c35810b19642ee37e41f20900bef93d843d1d7e2219547dde7bf88cc7dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://85684c09dd546fc9a972aa45da092b22d794a5588140a451becfbe962be82b76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0aa5c186d3ea5f41540e7450b9ee5a5bbfe8359
762ab9aa219a4e4bb26fb0a94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9717409dfdeb0d26d9a1c28a651d1feddfd4aaa11a9bc7db7206fbf8c6400c22\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b0fb183cca1d2f6f6f5bd9c80107e85fe0f5f39ea985a036ba115836e45d7cc2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b0fb183cca1d2f6f6f5bd9c80107e85fe0f5f39ea985a036ba115836e45d7cc2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:51:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a9bb746c11b2a67e014815e16d6765a1c86a3d2c156cae83853881bdb988507c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a9bb746c11b2a67e014815e16d6765a1c86a3d2c156cae83853881bdb988507c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://1fa78137ccc23e32f6eb838abf3991ca3d99b8349721f1419c892a7f8795f55f\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1fa78137ccc23e32f6eb838abf3991ca3d99b8349721f1419c892a7f8795f55f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:51:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:35Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:47Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:47 crc kubenswrapper[4787]: I0127 07:52:47.811244 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7a18f3e6327858658093c392b8b7de18ec6a1085c08ae31ae2f2dc371555011a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:47Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:47 crc kubenswrapper[4787]: I0127 07:52:47.826022 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:47Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:47 crc kubenswrapper[4787]: I0127 07:52:47.826795 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:47 crc kubenswrapper[4787]: I0127 07:52:47.826867 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:47 crc kubenswrapper[4787]: I0127 07:52:47.826882 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:47 crc kubenswrapper[4787]: I0127 07:52:47.826901 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:47 crc kubenswrapper[4787]: I0127 07:52:47.826913 4787 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:47Z","lastTransitionTime":"2026-01-27T07:52:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:47 crc kubenswrapper[4787]: I0127 07:52:47.838468 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:47Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:47 crc kubenswrapper[4787]: I0127 07:52:47.851821 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-rqjpz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e6f78168-0b0d-464d-b1c7-00bb9a69c0d1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:47Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:47Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e73805a73ffd2c2028fa682daaf20e295ecd2f2d7e24bb9b897883f18c3c514e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e73805a73ffd2c2028fa682daaf20e295ecd2f2d7e24bb9b897883f18c3c514e\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-27T07:52:46Z\\\",\\\"message\\\":\\\"2026-01-27T07:52:01+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_3d832606-6840-4320-b93f-1be93a343206\\\\n2026-01-27T07:52:01+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_3d832606-6840-4320-b93f-1be93a343206 to /host/opt/cni/bin/\\\\n2026-01-27T07:52:01Z [verbose] multus-daemon started\\\\n2026-01-27T07:52:01Z [verbose] Readiness Indicator file check\\\\n2026-01-27T07:52:46Z [error] have you checked that your 
default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-27T07:52:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tz7b2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-rqjpz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:47Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:47 crc kubenswrapper[4787]: I0127 07:52:47.869035 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-fqb6r" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0ff88955-7cd9-4af4-9e0e-79614a1d2994\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://47c65fabdecb2c24a7eeabdff7a89339f711ff64e9cb6516900be57a2753f89b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5e7833b50fe96981aa0ed63aa03b7078def1c809e60092a901a8b7ebaee78ad8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5e7833b50fe96981aa0ed63aa03b7078def1c809e60092a901a8b7ebaee78ad8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:52:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a0b5a15ba402c52fc6f3cd5d882f597c90ea8d1ab4e836f0e0998a301126f5a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a0b5a15ba402c52fc6f3cd5d882f597c90ea8d1ab4e836f0e0998a301126f5a6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:52:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:52:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://82bb64425ded7611030cccfa7a439b14e545bf83bc48c762df144eddc7d08487\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://82bb64425ded7611030cccfa7a439b14e545bf83bc48c762df144eddc7d08487\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:52:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:52:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9782b2a339d2dba1498f28ed3f7e6c4c2e747a282465311592f0fdb5863f823\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f9782b2a339d2dba1498f28ed3f7e6c4c2e747a282465311592f0fdb5863f823\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:52:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:52:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c8924a3ef645447151d91c506d105c9760f0dce0c568e9bf37ea76108a6352b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c8924a3ef645447151d91c506d105c9760f0dce0c568e9bf37ea76108a6352b2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:52:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f0da2ec606517cc9e0c2ed1d549c947db2eecb603f9bc9ba8bddbee0aad4710\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0f0da2ec606517cc9e0c2ed1d549c947db2eecb603f9bc9ba8bddbee0aad4710\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:52:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:52:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-fqb6r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:47Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:47 crc kubenswrapper[4787]: I0127 07:52:47.930516 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:47 crc kubenswrapper[4787]: I0127 07:52:47.930581 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:47 crc 
kubenswrapper[4787]: I0127 07:52:47.930593 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:47 crc kubenswrapper[4787]: I0127 07:52:47.930611 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:47 crc kubenswrapper[4787]: I0127 07:52:47.930622 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:47Z","lastTransitionTime":"2026-01-27T07:52:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:48 crc kubenswrapper[4787]: I0127 07:52:48.033745 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:48 crc kubenswrapper[4787]: I0127 07:52:48.033793 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:48 crc kubenswrapper[4787]: I0127 07:52:48.033809 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:48 crc kubenswrapper[4787]: I0127 07:52:48.033829 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:48 crc kubenswrapper[4787]: I0127 07:52:48.033843 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:48Z","lastTransitionTime":"2026-01-27T07:52:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:48 crc kubenswrapper[4787]: I0127 07:52:48.075656 4787 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-28 10:43:51.328486492 +0000 UTC Jan 27 07:52:48 crc kubenswrapper[4787]: I0127 07:52:48.136848 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:48 crc kubenswrapper[4787]: I0127 07:52:48.136904 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:48 crc kubenswrapper[4787]: I0127 07:52:48.136913 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:48 crc kubenswrapper[4787]: I0127 07:52:48.136930 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:48 crc kubenswrapper[4787]: I0127 07:52:48.136940 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:48Z","lastTransitionTime":"2026-01-27T07:52:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:52:48 crc kubenswrapper[4787]: I0127 07:52:48.240272 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:48 crc kubenswrapper[4787]: I0127 07:52:48.240309 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:48 crc kubenswrapper[4787]: I0127 07:52:48.240321 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:48 crc kubenswrapper[4787]: I0127 07:52:48.240340 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:48 crc kubenswrapper[4787]: I0127 07:52:48.240354 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:48Z","lastTransitionTime":"2026-01-27T07:52:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:48 crc kubenswrapper[4787]: I0127 07:52:48.343138 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:48 crc kubenswrapper[4787]: I0127 07:52:48.343179 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:48 crc kubenswrapper[4787]: I0127 07:52:48.343188 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:48 crc kubenswrapper[4787]: I0127 07:52:48.343201 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:48 crc kubenswrapper[4787]: I0127 07:52:48.343212 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:48Z","lastTransitionTime":"2026-01-27T07:52:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:48 crc kubenswrapper[4787]: I0127 07:52:48.445718 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:48 crc kubenswrapper[4787]: I0127 07:52:48.445792 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:48 crc kubenswrapper[4787]: I0127 07:52:48.445801 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:48 crc kubenswrapper[4787]: I0127 07:52:48.445820 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:48 crc kubenswrapper[4787]: I0127 07:52:48.445831 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:48Z","lastTransitionTime":"2026-01-27T07:52:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:52:48 crc kubenswrapper[4787]: I0127 07:52:48.548812 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:48 crc kubenswrapper[4787]: I0127 07:52:48.548883 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:48 crc kubenswrapper[4787]: I0127 07:52:48.548898 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:48 crc kubenswrapper[4787]: I0127 07:52:48.548923 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:48 crc kubenswrapper[4787]: I0127 07:52:48.548938 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:48Z","lastTransitionTime":"2026-01-27T07:52:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:48 crc kubenswrapper[4787]: I0127 07:52:48.593776 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-rqjpz_e6f78168-0b0d-464d-b1c7-00bb9a69c0d1/kube-multus/0.log" Jan 27 07:52:48 crc kubenswrapper[4787]: I0127 07:52:48.593845 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-rqjpz" event={"ID":"e6f78168-0b0d-464d-b1c7-00bb9a69c0d1","Type":"ContainerStarted","Data":"4924d9f32dbc90c38c70d18db5a32bdabb76d288c34457331804829e451cf169"} Jan 27 07:52:48 crc kubenswrapper[4787]: I0127 07:52:48.608697 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-vws75" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3969f21f-ab36-49b4-9a9c-02cf19e65ad0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:13Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:13Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:13Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq2mg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq2mg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:52:13Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-vws75\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:48Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:48 crc kubenswrapper[4787]: I0127 07:52:48.629114 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7642m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fa44405c-042c-485a-ab6c-912dcd377751\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b5f54aa358ddf7d9fe76fd45c4e3332ba2ef1fa4da77c6f38ae29209c4339a80\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2abb29a77d0081b70495701e68f0a5a9b80656eab59ae8de10fab1450a1df49f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://108cdc389c2cbb9baeb538856e70ea1ca2bce5968eb4097700b3e71e5322258d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0bcc1a1d9f6e5bd0d8cf9b36441460debd839f92291531175eb05db5070136e8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fbc588f712cb0167029fc80f924f52841fb904738bbcf774c53ca15a2642ef9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc1a27dd4120a4eafd6bfbd908f9fdbc80e9b006afafb509399f2b657b129b2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://adb98705453ca5a2191ab57c0255b907d1e1d505
962fddc38dd440a1d56d505f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://adb98705453ca5a2191ab57c0255b907d1e1d505962fddc38dd440a1d56d505f\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-27T07:52:30Z\\\",\\\"message\\\":\\\" 6533 obj_retry.go:365] Adding new object: *v1.Pod openshift-network-node-identity/network-node-identity-vrzqb\\\\nF0127 07:52:30.089457 6533 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:30Z is after 2025-08-24T17:21:41Z]\\\\nI0127 07:52:30.089470 6533 obj_retry.go:303] Retry object setup: *v1.Pod openshift-image-registry/node-ca-rqwfc\\\\nI0127 07:52:30.089462 6533 ovn.go:134] Ensuring zone local for Pod openshift-network-node-identity/network-node-identity-vrzqb in node crc\\\\nI0127 07:52:30.089487 6533 obj_retry.go:386] Retry successful for *v1.Pod openshift-network-node-identity/network-node-identity-vrzqb after 0 fai\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-27T07:52:29Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-7642m_openshift-ovn-kubernetes(fa44405c-042c-485a-ab6c-912dcd377751)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d46b5c512f7b14d97a53ce0355a7ceb08370d3e91ccef003b40386a9785df413\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://199ad62dee1b7ba9f8af9b2f3fcd77db94e8ba7dbfe2d011610c47358c17126b\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://199ad62dee1b7ba9f8af9b2f3fcd77db94e8ba7dbfe2d011610c47358c17126b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:52:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:59Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-7642m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:48Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:48 crc kubenswrapper[4787]: I0127 07:52:48.644264 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-vhss5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"643aaef9-e302-436b-943e-940480ef74fc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5cda08199034af06aa5966ebd72a8a553fe0c83acf73bbd42f4f7d6bb121b2e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8q989
\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ea42ae75a943bc89609995381bc82366333131d7c7200a3ab758b9de239e3283\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8q989\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:52:12Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-vhss5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:48Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:48 crc kubenswrapper[4787]: I0127 07:52:48.653769 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:48 crc kubenswrapper[4787]: I0127 07:52:48.653813 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:48 crc kubenswrapper[4787]: I0127 07:52:48.653825 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:48 crc kubenswrapper[4787]: I0127 07:52:48.653843 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:48 crc kubenswrapper[4787]: I0127 07:52:48.653855 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:48Z","lastTransitionTime":"2026-01-27T07:52:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:52:48 crc kubenswrapper[4787]: I0127 07:52:48.656739 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-q4fh5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f051e184-acac-47cf-9e04-9df648288715\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cf36b99a3389bdbbca8142539870671f6be61df22503df198f6255937e619950\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zvg7g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8829867d627dfed47b988a93a9ac3e381c0bf59794b65f8e1c1e821838477748\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zvg7g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:59Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-q4fh5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:48Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:48 crc kubenswrapper[4787]: I0127 07:52:48.670909 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6d28aae4-3359-4d7e-8862-8db4b17a3403\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d820bf9fe1c788b0bb17535226d96cd310fc188148091fa8b4186dd43baa5f75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d407da23fc5babf6cec063fb31861fd8ac0f456a44746a83740c61c1e53a436c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0509b185e88f56d56bc6f8429a620d74855d938f1206d6c9193d8e93fc12ae91\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\"
:\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec120e647d27b70e3066a00046d12258db3162ab9beef75b4079d546c2533aaf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ec120e647d27b70e3066a00046d12258db3162ab9beef75b4079d546c2533aaf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:51:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:35Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:48Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:48 crc kubenswrapper[4787]: I0127 07:52:48.684961 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2974b5846d4b9e95a3eab849f160c5b33393ff6b1bf1275bc6476ec80807d632\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4426d84c4375c7c659c77049bbd6a720bf004fa5e7780b1e8739a2620c3667f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:48Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:48 crc kubenswrapper[4787]: I0127 07:52:48.696785 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1d5a64cdfd5f8aff93294f92aebc9ccf8fa2a4063e347fce2e35604d27651d9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:48Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:48 crc kubenswrapper[4787]: I0127 07:52:48.707114 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-fghc7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6cac1bb0-6b6f-442f-9db4-a25d4f194bb1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://825d098d784c9e43aa1c5b9014dd290f1a23ddf0651dfd603c634682a8f05da2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c8zvj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:59Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-fghc7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:48Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:48 crc kubenswrapper[4787]: I0127 07:52:48.717810 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-rqwfc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fe88a860-6bb6-40ef-8ed7-c16f06fad8d3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f0dc7c2423b05f4eb857a5a6d8a5006cea055965d20a85e84659b369f9659eeb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-scmbf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:52:02Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-rqwfc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:48Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:48 crc kubenswrapper[4787]: I0127 07:52:48.729251 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9bef4a57-904d-47b1-baa1-224c5c9f2b1a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://99c0fb5499a4bed619d003da60f83076e413fc5bda47a12d42770f567deeeec8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c114c2274358e777b240765cca81ea54a2e334002317a86595b7bfec95a11fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c470d3edd878f69c48f1457780557dd56300c0fe69342c479f1922dca856bceb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bba5d04b33d62090867842211184f53bc23cf78b90a4c6709792639b74d1cf0d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:35Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:48Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:48 crc kubenswrapper[4787]: I0127 07:52:48.741991 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"60eef58e-b2eb-43d9-a499-317083a89ca3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://35e90c0899d9ae9ea7cce3f5904955f1f3a80e147efbd766e9ed3b34014f40df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://53dd18ffb0f39abccd8edc121977448f4c491f6be9acee866dde8cbeb7c52ab5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c
987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://af5452f3980a8a4614721cb60fffffb3b1f3f5d8d82928249857ed2dd3fb5986\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://215bbc991c576293e40d9ba239e697bfad702383af70483ddf0acea311b4ae28\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://879d855210b326ad3cbb964024c0b48b7373519deae8b974b9f5864416c15ef3\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"message\\\":\\\"le observer\\\\nW0127 07:51:54.984485 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0127 07:51:54.984680 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0127 07:51:54.985504 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2155430209/tls.crt::/tmp/serving-cert-2155430209/tls.key\\\\\\\"\\\\nI0127 07:51:55.207010 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0127 07:51:55.214027 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0127 07:51:55.214056 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0127 07:51:55.214085 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0127 07:51:55.214092 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0127 07:51:55.221404 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0127 07:51:55.221608 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0127 07:51:55.221697 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0127 07:51:55.221768 1 secure_serving.go:69] Use of insecure cipher 
'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0127 07:51:55.221825 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0127 07:51:55.221877 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0127 07:51:55.221926 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0127 07:51:55.221414 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0127 07:51:55.222961 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-27T07:51:49Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f3424e1f77a68fac6a8ccd12e018ce28850817c99e72cef7edbff0941124c46b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c37afd8d2562157729aca3685d7abae2bb6b9e081c2b456706dde875fd762c07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c37afd8d2562157729aca3685d7abae2bb6b9e081c2b456706dde875fd762c07\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:51:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:35Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:48Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:48 crc kubenswrapper[4787]: I0127 07:52:48.755265 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:48Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:48 crc kubenswrapper[4787]: I0127 07:52:48.756332 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:48 crc kubenswrapper[4787]: I0127 07:52:48.756417 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:48 crc kubenswrapper[4787]: I0127 07:52:48.756431 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:48 crc kubenswrapper[4787]: I0127 07:52:48.756447 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:48 crc kubenswrapper[4787]: I0127 07:52:48.756806 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:48Z","lastTransitionTime":"2026-01-27T07:52:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:52:48 crc kubenswrapper[4787]: I0127 07:52:48.769726 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:48Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:48 crc kubenswrapper[4787]: I0127 07:52:48.783844 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-rqjpz" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e6f78168-0b0d-464d-b1c7-00bb9a69c0d1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4924d9f32dbc90c38c70d18db5a32bdabb76d288c34457331804829e451cf169\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e73805a73ffd2c2028fa682daaf20e295ecd2f2d7e24bb9b897883f18c3c514e\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-27T07:52:46Z\\\",\\\"message\\\":\\\"2026-01-27T07:52:01+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_3d832606-6840-4320-b93f-1be93a343206\\\\n2026-01-27T07:52:01+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_3d832606-6840-4320-b93f-1be93a343206 to /host/opt/cni/bin/\\\\n2026-01-27T07:52:01Z [verbose] multus-daemon started\\\\n2026-01-27T07:52:01Z [verbose] Readiness Indicator file check\\\\n2026-01-27T07:52:46Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-27T07:52:00Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tz7b2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-rqjpz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:48Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:48 crc kubenswrapper[4787]: I0127 07:52:48.807198 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-fqb6r" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0ff88955-7cd9-4af4-9e0e-79614a1d2994\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://47c65fabdecb2c24a7eeabdff7a89339f711ff64e9cb6516900be57a2753f89b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5e7833b50fe96981aa0ed63aa03b7078def1c809e60092a901a8b7ebaee78ad8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5e7833b50fe96981aa0ed63aa03b7078def1c809e60092a901a8b7ebaee78ad8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:52:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a0b5a15ba402c52fc6f3cd5d882f597c90ea8d1ab4e836f0e0998a301126f5a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a0b5a15ba402c52fc6f3cd5d882f597c90ea8d1ab4e836f0e0998a301126f5a6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:52:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:52:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://82bb64425ded7611030cccfa7a439b14e545bf83bc48c762df144eddc7d08487\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://82bb64425ded7611030cccfa7a439b14e545bf83bc48c762df144eddc7d08487\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:52:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:52:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9782b2a339d2dba1498f28ed3f7e6c4c2e747a282465311592f0fdb5863f823\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f9782b2a339d2dba1498f28ed3f7e6c4c2e747a282465311592f0fdb5863f823\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:52:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:52:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c8924a3ef645447151d91c506d105c9760f0dce0c568e9bf37ea76108a6352b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c8924a3ef645447151d91c506d105c9760f0dce0c568e9bf37ea76108a6352b2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:52:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f0da2ec606517cc9e0c2ed1d549c947db2eecb603f9bc9ba8bddbee0aad4710\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0f0da2ec606517cc9e0c2ed1d549c947db2eecb603f9bc9ba8bddbee0aad4710\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:52:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:52:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-fqb6r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:48Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:48 crc kubenswrapper[4787]: I0127 07:52:48.825408 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7279787f-4f29-4eae-a213-b7ea5d579b93\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://486c1b541ff6adc6a3cdab348fbdd7fbcf1c202c1a474e5e5db37bbcfb38fe06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://47800c35810b19642ee37e41f20900bef93d843d1d7e2219547dde7bf88cc7dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://85684c09dd546fc9a972aa45da092b22d794a5588140a451becfbe962be82b76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0aa5c186d3ea5f41540e7450b9ee5a5bbfe8359
762ab9aa219a4e4bb26fb0a94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9717409dfdeb0d26d9a1c28a651d1feddfd4aaa11a9bc7db7206fbf8c6400c22\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b0fb183cca1d2f6f6f5bd9c80107e85fe0f5f39ea985a036ba115836e45d7cc2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b0fb183cca1d2f6f6f5bd9c80107e85fe0f5f39ea985a036ba115836e45d7cc2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:51:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a9bb746c11b2a67e014815e16d6765a1c86a3d2c156cae83853881bdb988507c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a9bb746c11b2a67e014815e16d6765a1c86a3d2c156cae83853881bdb988507c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://1fa78137ccc23e32f6eb838abf3991ca3d99b8349721f1419c892a7f8795f55f\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1fa78137ccc23e32f6eb838abf3991ca3d99b8349721f1419c892a7f8795f55f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:51:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:35Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:48Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:48 crc kubenswrapper[4787]: I0127 07:52:48.839443 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7a18f3e6327858658093c392b8b7de18ec6a1085c08ae31ae2f2dc371555011a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:48Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:48 crc kubenswrapper[4787]: I0127 07:52:48.852876 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:48Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:48 crc kubenswrapper[4787]: I0127 07:52:48.860068 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:48 crc kubenswrapper[4787]: I0127 07:52:48.860136 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:48 crc kubenswrapper[4787]: I0127 07:52:48.860149 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:48 crc kubenswrapper[4787]: I0127 07:52:48.860169 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:48 crc kubenswrapper[4787]: I0127 07:52:48.860186 4787 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:48Z","lastTransitionTime":"2026-01-27T07:52:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:48 crc kubenswrapper[4787]: I0127 07:52:48.963781 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:48 crc kubenswrapper[4787]: I0127 07:52:48.964263 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:48 crc kubenswrapper[4787]: I0127 07:52:48.964422 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:48 crc kubenswrapper[4787]: I0127 07:52:48.964634 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:48 crc kubenswrapper[4787]: I0127 07:52:48.964782 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:48Z","lastTransitionTime":"2026-01-27T07:52:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:49 crc kubenswrapper[4787]: I0127 07:52:49.068129 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:49 crc kubenswrapper[4787]: I0127 07:52:49.068176 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:49 crc kubenswrapper[4787]: I0127 07:52:49.068187 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:49 crc kubenswrapper[4787]: I0127 07:52:49.068209 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:49 crc kubenswrapper[4787]: I0127 07:52:49.068222 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:49Z","lastTransitionTime":"2026-01-27T07:52:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:49 crc kubenswrapper[4787]: I0127 07:52:49.075907 4787 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-14 23:45:38.028803353 +0000 UTC Jan 27 07:52:49 crc kubenswrapper[4787]: I0127 07:52:49.076032 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 27 07:52:49 crc kubenswrapper[4787]: I0127 07:52:49.076063 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-vws75" Jan 27 07:52:49 crc kubenswrapper[4787]: I0127 07:52:49.076143 4787 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 27 07:52:49 crc kubenswrapper[4787]: E0127 07:52:49.076171 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 27 07:52:49 crc kubenswrapper[4787]: E0127 07:52:49.076308 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-vws75" podUID="3969f21f-ab36-49b4-9a9c-02cf19e65ad0" Jan 27 07:52:49 crc kubenswrapper[4787]: E0127 07:52:49.076394 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 27 07:52:49 crc kubenswrapper[4787]: I0127 07:52:49.077187 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 27 07:52:49 crc kubenswrapper[4787]: E0127 07:52:49.077368 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 27 07:52:49 crc kubenswrapper[4787]: I0127 07:52:49.170927 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:49 crc kubenswrapper[4787]: I0127 07:52:49.170973 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:49 crc kubenswrapper[4787]: I0127 07:52:49.170986 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:49 crc kubenswrapper[4787]: I0127 07:52:49.171004 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:49 crc kubenswrapper[4787]: I0127 07:52:49.171015 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:49Z","lastTransitionTime":"2026-01-27T07:52:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:52:49 crc kubenswrapper[4787]: I0127 07:52:49.274351 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:49 crc kubenswrapper[4787]: I0127 07:52:49.274403 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:49 crc kubenswrapper[4787]: I0127 07:52:49.274417 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:49 crc kubenswrapper[4787]: I0127 07:52:49.274435 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:49 crc kubenswrapper[4787]: I0127 07:52:49.274477 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:49Z","lastTransitionTime":"2026-01-27T07:52:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:49 crc kubenswrapper[4787]: I0127 07:52:49.376824 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:49 crc kubenswrapper[4787]: I0127 07:52:49.376889 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:49 crc kubenswrapper[4787]: I0127 07:52:49.376909 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:49 crc kubenswrapper[4787]: I0127 07:52:49.376938 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:49 crc kubenswrapper[4787]: I0127 07:52:49.376964 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:49Z","lastTransitionTime":"2026-01-27T07:52:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:49 crc kubenswrapper[4787]: I0127 07:52:49.480885 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:49 crc kubenswrapper[4787]: I0127 07:52:49.480960 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:49 crc kubenswrapper[4787]: I0127 07:52:49.480971 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:49 crc kubenswrapper[4787]: I0127 07:52:49.480991 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:49 crc kubenswrapper[4787]: I0127 07:52:49.481002 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:49Z","lastTransitionTime":"2026-01-27T07:52:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:52:49 crc kubenswrapper[4787]: I0127 07:52:49.583322 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:49 crc kubenswrapper[4787]: I0127 07:52:49.583364 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:49 crc kubenswrapper[4787]: I0127 07:52:49.583373 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:49 crc kubenswrapper[4787]: I0127 07:52:49.583390 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:49 crc kubenswrapper[4787]: I0127 07:52:49.583400 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:49Z","lastTransitionTime":"2026-01-27T07:52:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:49 crc kubenswrapper[4787]: I0127 07:52:49.686974 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:49 crc kubenswrapper[4787]: I0127 07:52:49.687050 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:49 crc kubenswrapper[4787]: I0127 07:52:49.687083 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:49 crc kubenswrapper[4787]: I0127 07:52:49.687119 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:49 crc kubenswrapper[4787]: I0127 07:52:49.687149 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:49Z","lastTransitionTime":"2026-01-27T07:52:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:49 crc kubenswrapper[4787]: I0127 07:52:49.791271 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:49 crc kubenswrapper[4787]: I0127 07:52:49.791324 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:49 crc kubenswrapper[4787]: I0127 07:52:49.791333 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:49 crc kubenswrapper[4787]: I0127 07:52:49.791351 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:49 crc kubenswrapper[4787]: I0127 07:52:49.791363 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:49Z","lastTransitionTime":"2026-01-27T07:52:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:52:49 crc kubenswrapper[4787]: I0127 07:52:49.894384 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:49 crc kubenswrapper[4787]: I0127 07:52:49.894500 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:49 crc kubenswrapper[4787]: I0127 07:52:49.894536 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:49 crc kubenswrapper[4787]: I0127 07:52:49.894610 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:49 crc kubenswrapper[4787]: I0127 07:52:49.894642 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:49Z","lastTransitionTime":"2026-01-27T07:52:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:49 crc kubenswrapper[4787]: I0127 07:52:49.998580 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:49 crc kubenswrapper[4787]: I0127 07:52:49.998636 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:49 crc kubenswrapper[4787]: I0127 07:52:49.998648 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:49 crc kubenswrapper[4787]: I0127 07:52:49.998666 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:49 crc kubenswrapper[4787]: I0127 07:52:49.998678 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:49Z","lastTransitionTime":"2026-01-27T07:52:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:52:50 crc kubenswrapper[4787]: I0127 07:52:50.076840 4787 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-17 00:13:37.514423681 +0000 UTC Jan 27 07:52:50 crc kubenswrapper[4787]: I0127 07:52:50.102356 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:50 crc kubenswrapper[4787]: I0127 07:52:50.102422 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:50 crc kubenswrapper[4787]: I0127 07:52:50.102439 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:50 crc kubenswrapper[4787]: I0127 07:52:50.102462 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:50 crc kubenswrapper[4787]: I0127 07:52:50.102476 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:50Z","lastTransitionTime":"2026-01-27T07:52:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:50 crc kubenswrapper[4787]: I0127 07:52:50.205477 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:50 crc kubenswrapper[4787]: I0127 07:52:50.205541 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:50 crc kubenswrapper[4787]: I0127 07:52:50.205585 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:50 crc kubenswrapper[4787]: I0127 07:52:50.205614 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:50 crc kubenswrapper[4787]: I0127 07:52:50.205634 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:50Z","lastTransitionTime":"2026-01-27T07:52:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:52:50 crc kubenswrapper[4787]: I0127 07:52:50.309033 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:50 crc kubenswrapper[4787]: I0127 07:52:50.309094 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:50 crc kubenswrapper[4787]: I0127 07:52:50.309107 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:50 crc kubenswrapper[4787]: I0127 07:52:50.309126 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:50 crc kubenswrapper[4787]: I0127 07:52:50.309137 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:50Z","lastTransitionTime":"2026-01-27T07:52:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:50 crc kubenswrapper[4787]: I0127 07:52:50.412201 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:50 crc kubenswrapper[4787]: I0127 07:52:50.412258 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:50 crc kubenswrapper[4787]: I0127 07:52:50.412273 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:50 crc kubenswrapper[4787]: I0127 07:52:50.412294 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:50 crc kubenswrapper[4787]: I0127 07:52:50.412308 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:50Z","lastTransitionTime":"2026-01-27T07:52:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:50 crc kubenswrapper[4787]: I0127 07:52:50.516101 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:50 crc kubenswrapper[4787]: I0127 07:52:50.516190 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:50 crc kubenswrapper[4787]: I0127 07:52:50.516214 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:50 crc kubenswrapper[4787]: I0127 07:52:50.516244 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:50 crc kubenswrapper[4787]: I0127 07:52:50.516262 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:50Z","lastTransitionTime":"2026-01-27T07:52:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:52:50 crc kubenswrapper[4787]: I0127 07:52:50.618455 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:50 crc kubenswrapper[4787]: I0127 07:52:50.618503 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:50 crc kubenswrapper[4787]: I0127 07:52:50.618516 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:50 crc kubenswrapper[4787]: I0127 07:52:50.618534 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:50 crc kubenswrapper[4787]: I0127 07:52:50.618544 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:50Z","lastTransitionTime":"2026-01-27T07:52:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:50 crc kubenswrapper[4787]: I0127 07:52:50.721381 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:50 crc kubenswrapper[4787]: I0127 07:52:50.721450 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:50 crc kubenswrapper[4787]: I0127 07:52:50.721462 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:50 crc kubenswrapper[4787]: I0127 07:52:50.721485 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:50 crc kubenswrapper[4787]: I0127 07:52:50.721498 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:50Z","lastTransitionTime":"2026-01-27T07:52:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:50 crc kubenswrapper[4787]: I0127 07:52:50.824080 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:50 crc kubenswrapper[4787]: I0127 07:52:50.824159 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:50 crc kubenswrapper[4787]: I0127 07:52:50.824179 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:50 crc kubenswrapper[4787]: I0127 07:52:50.824209 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:50 crc kubenswrapper[4787]: I0127 07:52:50.824231 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:50Z","lastTransitionTime":"2026-01-27T07:52:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:52:50 crc kubenswrapper[4787]: I0127 07:52:50.927078 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:50 crc kubenswrapper[4787]: I0127 07:52:50.927150 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:50 crc kubenswrapper[4787]: I0127 07:52:50.927167 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:50 crc kubenswrapper[4787]: I0127 07:52:50.927194 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:50 crc kubenswrapper[4787]: I0127 07:52:50.927215 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:50Z","lastTransitionTime":"2026-01-27T07:52:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:51 crc kubenswrapper[4787]: I0127 07:52:51.031213 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:51 crc kubenswrapper[4787]: I0127 07:52:51.031290 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:51 crc kubenswrapper[4787]: I0127 07:52:51.031312 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:51 crc kubenswrapper[4787]: I0127 07:52:51.031344 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:51 crc kubenswrapper[4787]: I0127 07:52:51.031370 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:51Z","lastTransitionTime":"2026-01-27T07:52:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:51 crc kubenswrapper[4787]: I0127 07:52:51.076493 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-vws75" Jan 27 07:52:51 crc kubenswrapper[4787]: I0127 07:52:51.076493 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 27 07:52:51 crc kubenswrapper[4787]: E0127 07:52:51.076702 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-vws75" podUID="3969f21f-ab36-49b4-9a9c-02cf19e65ad0" Jan 27 07:52:51 crc kubenswrapper[4787]: E0127 07:52:51.076721 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 27 07:52:51 crc kubenswrapper[4787]: I0127 07:52:51.076536 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 27 07:52:51 crc kubenswrapper[4787]: I0127 07:52:51.076513 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 27 07:52:51 crc kubenswrapper[4787]: E0127 07:52:51.076791 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 27 07:52:51 crc kubenswrapper[4787]: I0127 07:52:51.077000 4787 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-12 08:06:19.95521626 +0000 UTC Jan 27 07:52:51 crc kubenswrapper[4787]: E0127 07:52:51.077007 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 27 07:52:51 crc kubenswrapper[4787]: I0127 07:52:51.134275 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:51 crc kubenswrapper[4787]: I0127 07:52:51.134352 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:51 crc kubenswrapper[4787]: I0127 07:52:51.134368 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:51 crc kubenswrapper[4787]: I0127 07:52:51.134398 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:51 crc kubenswrapper[4787]: I0127 07:52:51.134427 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:51Z","lastTransitionTime":"2026-01-27T07:52:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:52:51 crc kubenswrapper[4787]: I0127 07:52:51.237219 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:51 crc kubenswrapper[4787]: I0127 07:52:51.237280 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:51 crc kubenswrapper[4787]: I0127 07:52:51.237292 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:51 crc kubenswrapper[4787]: I0127 07:52:51.237312 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:51 crc kubenswrapper[4787]: I0127 07:52:51.237325 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:51Z","lastTransitionTime":"2026-01-27T07:52:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:51 crc kubenswrapper[4787]: I0127 07:52:51.340612 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:51 crc kubenswrapper[4787]: I0127 07:52:51.340659 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:51 crc kubenswrapper[4787]: I0127 07:52:51.340669 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:51 crc kubenswrapper[4787]: I0127 07:52:51.340685 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:51 crc kubenswrapper[4787]: I0127 07:52:51.340694 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:51Z","lastTransitionTime":"2026-01-27T07:52:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:51 crc kubenswrapper[4787]: I0127 07:52:51.445789 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:51 crc kubenswrapper[4787]: I0127 07:52:51.445868 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:51 crc kubenswrapper[4787]: I0127 07:52:51.445885 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:51 crc kubenswrapper[4787]: I0127 07:52:51.445915 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:51 crc kubenswrapper[4787]: I0127 07:52:51.445934 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:51Z","lastTransitionTime":"2026-01-27T07:52:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:52:51 crc kubenswrapper[4787]: I0127 07:52:51.549406 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:51 crc kubenswrapper[4787]: I0127 07:52:51.549450 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:51 crc kubenswrapper[4787]: I0127 07:52:51.549458 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:51 crc kubenswrapper[4787]: I0127 07:52:51.549474 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:51 crc kubenswrapper[4787]: I0127 07:52:51.549492 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:51Z","lastTransitionTime":"2026-01-27T07:52:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:51 crc kubenswrapper[4787]: I0127 07:52:51.652080 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:51 crc kubenswrapper[4787]: I0127 07:52:51.652130 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:51 crc kubenswrapper[4787]: I0127 07:52:51.652142 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:51 crc kubenswrapper[4787]: I0127 07:52:51.652157 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:51 crc kubenswrapper[4787]: I0127 07:52:51.652169 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:51Z","lastTransitionTime":"2026-01-27T07:52:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:51 crc kubenswrapper[4787]: I0127 07:52:51.756048 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:51 crc kubenswrapper[4787]: I0127 07:52:51.756169 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:51 crc kubenswrapper[4787]: I0127 07:52:51.756197 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:51 crc kubenswrapper[4787]: I0127 07:52:51.756232 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:51 crc kubenswrapper[4787]: I0127 07:52:51.756259 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:51Z","lastTransitionTime":"2026-01-27T07:52:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:52:51 crc kubenswrapper[4787]: I0127 07:52:51.859025 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:51 crc kubenswrapper[4787]: I0127 07:52:51.859068 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:51 crc kubenswrapper[4787]: I0127 07:52:51.859079 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:51 crc kubenswrapper[4787]: I0127 07:52:51.859094 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:51 crc kubenswrapper[4787]: I0127 07:52:51.859104 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:51Z","lastTransitionTime":"2026-01-27T07:52:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:51 crc kubenswrapper[4787]: I0127 07:52:51.961283 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:51 crc kubenswrapper[4787]: I0127 07:52:51.961321 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:51 crc kubenswrapper[4787]: I0127 07:52:51.961330 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:51 crc kubenswrapper[4787]: I0127 07:52:51.961343 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:51 crc kubenswrapper[4787]: I0127 07:52:51.961353 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:51Z","lastTransitionTime":"2026-01-27T07:52:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:52 crc kubenswrapper[4787]: I0127 07:52:52.063478 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:52 crc kubenswrapper[4787]: I0127 07:52:52.063517 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:52 crc kubenswrapper[4787]: I0127 07:52:52.063528 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:52 crc kubenswrapper[4787]: I0127 07:52:52.063544 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:52 crc kubenswrapper[4787]: I0127 07:52:52.063569 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:52Z","lastTransitionTime":"2026-01-27T07:52:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:52:52 crc kubenswrapper[4787]: I0127 07:52:52.077133 4787 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-23 05:28:57.306933215 +0000 UTC Jan 27 07:52:52 crc kubenswrapper[4787]: I0127 07:52:52.165455 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:52 crc kubenswrapper[4787]: I0127 07:52:52.165494 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:52 crc kubenswrapper[4787]: I0127 07:52:52.165503 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:52 crc kubenswrapper[4787]: I0127 07:52:52.165519 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:52 crc kubenswrapper[4787]: I0127 07:52:52.165532 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:52Z","lastTransitionTime":"2026-01-27T07:52:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:52 crc kubenswrapper[4787]: I0127 07:52:52.269517 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:52 crc kubenswrapper[4787]: I0127 07:52:52.269651 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:52 crc kubenswrapper[4787]: I0127 07:52:52.269679 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:52 crc kubenswrapper[4787]: I0127 07:52:52.269720 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:52 crc kubenswrapper[4787]: I0127 07:52:52.269751 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:52Z","lastTransitionTime":"2026-01-27T07:52:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:52:52 crc kubenswrapper[4787]: I0127 07:52:52.372164 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:52 crc kubenswrapper[4787]: I0127 07:52:52.372206 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:52 crc kubenswrapper[4787]: I0127 07:52:52.372218 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:52 crc kubenswrapper[4787]: I0127 07:52:52.372235 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:52 crc kubenswrapper[4787]: I0127 07:52:52.372247 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:52Z","lastTransitionTime":"2026-01-27T07:52:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:52 crc kubenswrapper[4787]: I0127 07:52:52.459129 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:52 crc kubenswrapper[4787]: I0127 07:52:52.459190 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:52 crc kubenswrapper[4787]: I0127 07:52:52.459201 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:52 crc kubenswrapper[4787]: I0127 07:52:52.459218 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:52 crc kubenswrapper[4787]: I0127 07:52:52.459228 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:52Z","lastTransitionTime":"2026-01-27T07:52:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:52:52 crc kubenswrapper[4787]: E0127 07:52:52.473523 4787 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T07:52:52Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:52Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T07:52:52Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:52Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T07:52:52Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:52Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T07:52:52Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:52Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"55ded10c-ad4c-443d-a571-c7a8f4a50b03\\\",\\\"systemUUID\\\":\\\"553a2493-323d-43ed-bfcf-44c5a8746c19\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:52Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:52 crc kubenswrapper[4787]: I0127 07:52:52.478471 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:52 crc kubenswrapper[4787]: I0127 07:52:52.478512 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 27 07:52:52 crc kubenswrapper[4787]: I0127 07:52:52.478523 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:52 crc kubenswrapper[4787]: I0127 07:52:52.478539 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:52 crc kubenswrapper[4787]: I0127 07:52:52.478566 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:52Z","lastTransitionTime":"2026-01-27T07:52:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:52 crc kubenswrapper[4787]: E0127 07:52:52.491254 4787 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T07:52:52Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:52Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T07:52:52Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:52Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T07:52:52Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:52Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T07:52:52Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:52Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"55ded10c-ad4c-443d-a571-c7a8f4a50b03\\\",\\\"systemUUID\\\":\\\"553a2493-323d-43ed-bfcf-44c5a8746c19\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:52Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:52 crc kubenswrapper[4787]: I0127 07:52:52.496580 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:52 crc kubenswrapper[4787]: I0127 07:52:52.496617 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 27 07:52:52 crc kubenswrapper[4787]: I0127 07:52:52.496627 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:52 crc kubenswrapper[4787]: I0127 07:52:52.496644 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:52 crc kubenswrapper[4787]: I0127 07:52:52.496656 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:52Z","lastTransitionTime":"2026-01-27T07:52:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:52 crc kubenswrapper[4787]: E0127 07:52:52.511405 4787 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T07:52:52Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:52Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T07:52:52Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:52Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T07:52:52Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:52Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T07:52:52Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:52Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"55ded10c-ad4c-443d-a571-c7a8f4a50b03\\\",\\\"systemUUID\\\":\\\"553a2493-323d-43ed-bfcf-44c5a8746c19\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:52Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:52 crc kubenswrapper[4787]: I0127 07:52:52.516254 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:52 crc kubenswrapper[4787]: I0127 07:52:52.516301 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 27 07:52:52 crc kubenswrapper[4787]: I0127 07:52:52.516317 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:52 crc kubenswrapper[4787]: I0127 07:52:52.516336 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:52 crc kubenswrapper[4787]: I0127 07:52:52.516351 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:52Z","lastTransitionTime":"2026-01-27T07:52:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:52 crc kubenswrapper[4787]: E0127 07:52:52.529722 4787 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T07:52:52Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:52Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T07:52:52Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:52Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T07:52:52Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:52Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T07:52:52Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:52Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"55ded10c-ad4c-443d-a571-c7a8f4a50b03\\\",\\\"systemUUID\\\":\\\"553a2493-323d-43ed-bfcf-44c5a8746c19\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:52Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:52 crc kubenswrapper[4787]: I0127 07:52:52.533883 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:52 crc kubenswrapper[4787]: I0127 07:52:52.533911 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 27 07:52:52 crc kubenswrapper[4787]: I0127 07:52:52.533921 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:52 crc kubenswrapper[4787]: I0127 07:52:52.533935 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:52 crc kubenswrapper[4787]: I0127 07:52:52.533945 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:52Z","lastTransitionTime":"2026-01-27T07:52:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:52 crc kubenswrapper[4787]: E0127 07:52:52.546324 4787 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T07:52:52Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:52Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T07:52:52Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:52Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T07:52:52Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:52Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T07:52:52Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:52Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"55ded10c-ad4c-443d-a571-c7a8f4a50b03\\\",\\\"systemUUID\\\":\\\"553a2493-323d-43ed-bfcf-44c5a8746c19\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:52Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:52 crc kubenswrapper[4787]: E0127 07:52:52.546476 4787 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Jan 27 07:52:52 crc kubenswrapper[4787]: I0127 07:52:52.548082 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Jan 27 07:52:52 crc kubenswrapper[4787]: I0127 07:52:52.548125 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:52 crc kubenswrapper[4787]: I0127 07:52:52.548137 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:52 crc kubenswrapper[4787]: I0127 07:52:52.548158 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:52 crc kubenswrapper[4787]: I0127 07:52:52.548168 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:52Z","lastTransitionTime":"2026-01-27T07:52:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:52 crc kubenswrapper[4787]: I0127 07:52:52.650828 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:52 crc kubenswrapper[4787]: I0127 07:52:52.650878 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:52 crc kubenswrapper[4787]: I0127 07:52:52.650898 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:52 crc kubenswrapper[4787]: I0127 07:52:52.650918 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:52 crc kubenswrapper[4787]: I0127 07:52:52.650958 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:52Z","lastTransitionTime":"2026-01-27T07:52:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:52 crc kubenswrapper[4787]: I0127 07:52:52.753985 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:52 crc kubenswrapper[4787]: I0127 07:52:52.754107 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:52 crc kubenswrapper[4787]: I0127 07:52:52.754130 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:52 crc kubenswrapper[4787]: I0127 07:52:52.754160 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:52 crc kubenswrapper[4787]: I0127 07:52:52.754180 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:52Z","lastTransitionTime":"2026-01-27T07:52:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:52:52 crc kubenswrapper[4787]: I0127 07:52:52.857756 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:52 crc kubenswrapper[4787]: I0127 07:52:52.857811 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:52 crc kubenswrapper[4787]: I0127 07:52:52.857830 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:52 crc kubenswrapper[4787]: I0127 07:52:52.857849 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:52 crc kubenswrapper[4787]: I0127 07:52:52.857862 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:52Z","lastTransitionTime":"2026-01-27T07:52:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:52 crc kubenswrapper[4787]: I0127 07:52:52.961613 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:52 crc kubenswrapper[4787]: I0127 07:52:52.961986 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:52 crc kubenswrapper[4787]: I0127 07:52:52.962089 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:52 crc kubenswrapper[4787]: I0127 07:52:52.962201 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:52 crc kubenswrapper[4787]: I0127 07:52:52.962317 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:52Z","lastTransitionTime":"2026-01-27T07:52:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:53 crc kubenswrapper[4787]: I0127 07:52:53.066127 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:53 crc kubenswrapper[4787]: I0127 07:52:53.066200 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:53 crc kubenswrapper[4787]: I0127 07:52:53.066214 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:53 crc kubenswrapper[4787]: I0127 07:52:53.066234 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:53 crc kubenswrapper[4787]: I0127 07:52:53.066248 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:53Z","lastTransitionTime":"2026-01-27T07:52:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:52:53 crc kubenswrapper[4787]: I0127 07:52:53.075844 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 27 07:52:53 crc kubenswrapper[4787]: I0127 07:52:53.075924 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 27 07:52:53 crc kubenswrapper[4787]: E0127 07:52:53.076198 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 27 07:52:53 crc kubenswrapper[4787]: I0127 07:52:53.075960 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 27 07:52:53 crc kubenswrapper[4787]: E0127 07:52:53.076650 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 27 07:52:53 crc kubenswrapper[4787]: I0127 07:52:53.075925 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-vws75" Jan 27 07:52:53 crc kubenswrapper[4787]: E0127 07:52:53.076373 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 27 07:52:53 crc kubenswrapper[4787]: E0127 07:52:53.077086 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-vws75" podUID="3969f21f-ab36-49b4-9a9c-02cf19e65ad0" Jan 27 07:52:53 crc kubenswrapper[4787]: I0127 07:52:53.077965 4787 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-05 16:11:27.522865353 +0000 UTC Jan 27 07:52:53 crc kubenswrapper[4787]: I0127 07:52:53.168960 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:53 crc kubenswrapper[4787]: I0127 07:52:53.169500 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:53 crc kubenswrapper[4787]: I0127 07:52:53.169599 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:53 crc kubenswrapper[4787]: I0127 07:52:53.169674 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:53 crc kubenswrapper[4787]: I0127 07:52:53.169745 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:53Z","lastTransitionTime":"2026-01-27T07:52:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:53 crc kubenswrapper[4787]: I0127 07:52:53.273970 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:53 crc kubenswrapper[4787]: I0127 07:52:53.274373 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:53 crc kubenswrapper[4787]: I0127 07:52:53.274483 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:53 crc kubenswrapper[4787]: I0127 07:52:53.274621 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:53 crc kubenswrapper[4787]: I0127 07:52:53.274727 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:53Z","lastTransitionTime":"2026-01-27T07:52:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:52:53 crc kubenswrapper[4787]: I0127 07:52:53.377783 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:53 crc kubenswrapper[4787]: I0127 07:52:53.377880 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:53 crc kubenswrapper[4787]: I0127 07:52:53.377901 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:53 crc kubenswrapper[4787]: I0127 07:52:53.377932 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:53 crc kubenswrapper[4787]: I0127 07:52:53.377953 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:53Z","lastTransitionTime":"2026-01-27T07:52:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:53 crc kubenswrapper[4787]: I0127 07:52:53.481599 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:53 crc kubenswrapper[4787]: I0127 07:52:53.481984 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:53 crc kubenswrapper[4787]: I0127 07:52:53.482102 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:53 crc kubenswrapper[4787]: I0127 07:52:53.482222 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:53 crc kubenswrapper[4787]: I0127 07:52:53.482321 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:53Z","lastTransitionTime":"2026-01-27T07:52:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:53 crc kubenswrapper[4787]: I0127 07:52:53.585523 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:53 crc kubenswrapper[4787]: I0127 07:52:53.585648 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:53 crc kubenswrapper[4787]: I0127 07:52:53.585670 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:53 crc kubenswrapper[4787]: I0127 07:52:53.585697 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:53 crc kubenswrapper[4787]: I0127 07:52:53.585717 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:53Z","lastTransitionTime":"2026-01-27T07:52:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:52:53 crc kubenswrapper[4787]: I0127 07:52:53.689521 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:53 crc kubenswrapper[4787]: I0127 07:52:53.689614 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:53 crc kubenswrapper[4787]: I0127 07:52:53.689637 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:53 crc kubenswrapper[4787]: I0127 07:52:53.689666 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:53 crc kubenswrapper[4787]: I0127 07:52:53.689694 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:53Z","lastTransitionTime":"2026-01-27T07:52:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:53 crc kubenswrapper[4787]: I0127 07:52:53.793806 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:53 crc kubenswrapper[4787]: I0127 07:52:53.793924 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:53 crc kubenswrapper[4787]: I0127 07:52:53.793962 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:53 crc kubenswrapper[4787]: I0127 07:52:53.794002 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:53 crc kubenswrapper[4787]: I0127 07:52:53.794028 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:53Z","lastTransitionTime":"2026-01-27T07:52:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:53 crc kubenswrapper[4787]: I0127 07:52:53.896951 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:53 crc kubenswrapper[4787]: I0127 07:52:53.897019 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:53 crc kubenswrapper[4787]: I0127 07:52:53.897041 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:53 crc kubenswrapper[4787]: I0127 07:52:53.897069 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:53 crc kubenswrapper[4787]: I0127 07:52:53.897089 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:53Z","lastTransitionTime":"2026-01-27T07:52:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:52:54 crc kubenswrapper[4787]: I0127 07:52:54.000134 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:54 crc kubenswrapper[4787]: I0127 07:52:54.000501 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:54 crc kubenswrapper[4787]: I0127 07:52:54.000599 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:54 crc kubenswrapper[4787]: I0127 07:52:54.000679 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:54 crc kubenswrapper[4787]: I0127 07:52:54.000765 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:54Z","lastTransitionTime":"2026-01-27T07:52:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:54 crc kubenswrapper[4787]: I0127 07:52:54.079070 4787 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-10 09:06:46.169117937 +0000 UTC Jan 27 07:52:54 crc kubenswrapper[4787]: I0127 07:52:54.104344 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:54 crc kubenswrapper[4787]: I0127 07:52:54.104665 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:54 crc kubenswrapper[4787]: I0127 07:52:54.104838 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:54 crc kubenswrapper[4787]: I0127 07:52:54.104988 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:54 crc kubenswrapper[4787]: I0127 07:52:54.105276 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:54Z","lastTransitionTime":"2026-01-27T07:52:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:52:54 crc kubenswrapper[4787]: I0127 07:52:54.208250 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:54 crc kubenswrapper[4787]: I0127 07:52:54.208823 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:54 crc kubenswrapper[4787]: I0127 07:52:54.209070 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:54 crc kubenswrapper[4787]: I0127 07:52:54.209295 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:54 crc kubenswrapper[4787]: I0127 07:52:54.209673 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:54Z","lastTransitionTime":"2026-01-27T07:52:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:54 crc kubenswrapper[4787]: I0127 07:52:54.313823 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:54 crc kubenswrapper[4787]: I0127 07:52:54.313903 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:54 crc kubenswrapper[4787]: I0127 07:52:54.313921 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:54 crc kubenswrapper[4787]: I0127 07:52:54.313948 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:54 crc kubenswrapper[4787]: I0127 07:52:54.313976 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:54Z","lastTransitionTime":"2026-01-27T07:52:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:54 crc kubenswrapper[4787]: I0127 07:52:54.416649 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:54 crc kubenswrapper[4787]: I0127 07:52:54.416717 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:54 crc kubenswrapper[4787]: I0127 07:52:54.416737 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:54 crc kubenswrapper[4787]: I0127 07:52:54.416763 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:54 crc kubenswrapper[4787]: I0127 07:52:54.416782 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:54Z","lastTransitionTime":"2026-01-27T07:52:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:52:54 crc kubenswrapper[4787]: I0127 07:52:54.520132 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:54 crc kubenswrapper[4787]: I0127 07:52:54.520190 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:54 crc kubenswrapper[4787]: I0127 07:52:54.520203 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:54 crc kubenswrapper[4787]: I0127 07:52:54.520229 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:54 crc kubenswrapper[4787]: I0127 07:52:54.520243 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:54Z","lastTransitionTime":"2026-01-27T07:52:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:54 crc kubenswrapper[4787]: I0127 07:52:54.623382 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:54 crc kubenswrapper[4787]: I0127 07:52:54.623429 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:54 crc kubenswrapper[4787]: I0127 07:52:54.623442 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:54 crc kubenswrapper[4787]: I0127 07:52:54.623461 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:54 crc kubenswrapper[4787]: I0127 07:52:54.623477 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:54Z","lastTransitionTime":"2026-01-27T07:52:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:54 crc kubenswrapper[4787]: I0127 07:52:54.727729 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:54 crc kubenswrapper[4787]: I0127 07:52:54.727791 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:54 crc kubenswrapper[4787]: I0127 07:52:54.727810 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:54 crc kubenswrapper[4787]: I0127 07:52:54.727836 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:54 crc kubenswrapper[4787]: I0127 07:52:54.727856 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:54Z","lastTransitionTime":"2026-01-27T07:52:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:52:54 crc kubenswrapper[4787]: I0127 07:52:54.831196 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:54 crc kubenswrapper[4787]: I0127 07:52:54.831277 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:54 crc kubenswrapper[4787]: I0127 07:52:54.831298 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:54 crc kubenswrapper[4787]: I0127 07:52:54.831328 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:54 crc kubenswrapper[4787]: I0127 07:52:54.831349 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:54Z","lastTransitionTime":"2026-01-27T07:52:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:54 crc kubenswrapper[4787]: I0127 07:52:54.934912 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:54 crc kubenswrapper[4787]: I0127 07:52:54.935210 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:54 crc kubenswrapper[4787]: I0127 07:52:54.935314 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:54 crc kubenswrapper[4787]: I0127 07:52:54.935401 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:54 crc kubenswrapper[4787]: I0127 07:52:54.935481 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:54Z","lastTransitionTime":"2026-01-27T07:52:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:55 crc kubenswrapper[4787]: I0127 07:52:55.039415 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:55 crc kubenswrapper[4787]: I0127 07:52:55.039460 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:55 crc kubenswrapper[4787]: I0127 07:52:55.039472 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:55 crc kubenswrapper[4787]: I0127 07:52:55.039491 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:55 crc kubenswrapper[4787]: I0127 07:52:55.039507 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:55Z","lastTransitionTime":"2026-01-27T07:52:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:52:55 crc kubenswrapper[4787]: I0127 07:52:55.076615 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 27 07:52:55 crc kubenswrapper[4787]: I0127 07:52:55.076629 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 27 07:52:55 crc kubenswrapper[4787]: I0127 07:52:55.076783 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-vws75" Jan 27 07:52:55 crc kubenswrapper[4787]: E0127 07:52:55.076813 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 27 07:52:55 crc kubenswrapper[4787]: E0127 07:52:55.076989 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 27 07:52:55 crc kubenswrapper[4787]: E0127 07:52:55.077204 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-vws75" podUID="3969f21f-ab36-49b4-9a9c-02cf19e65ad0" Jan 27 07:52:55 crc kubenswrapper[4787]: I0127 07:52:55.077384 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 27 07:52:55 crc kubenswrapper[4787]: E0127 07:52:55.077665 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 27 07:52:55 crc kubenswrapper[4787]: I0127 07:52:55.079884 4787 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-01 15:45:57.45751022 +0000 UTC Jan 27 07:52:55 crc kubenswrapper[4787]: I0127 07:52:55.095830 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:55Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:55 crc kubenswrapper[4787]: I0127 07:52:55.117645 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-rqjpz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e6f78168-0b0d-464d-b1c7-00bb9a69c0d1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4924d9f32dbc90c38c70d18db5a32bdabb76d288c34457331804829e451cf169\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e73805a73ffd2c2028fa682daaf20e295ecd2f2d7e24bb9b897883f18c3c514e\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-27T07:52:46Z\\\",\\\"message\\\":\\\"2026-01-27T07:52:01+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_3d832606-6840-4320-b93f-1be93a343206\\\\n2026-01-27T07:52:01+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_3d832606-6840-4320-b93f-1be93a343206 to /host/opt/cni/bin/\\\\n2026-01-27T07:52:01Z [verbose] multus-daemon started\\\\n2026-01-27T07:52:01Z [verbose] Readiness Indicator file check\\\\n2026-01-27T07:52:46Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
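The terminated kube-multus container above records why it exited: after copying the CNI binaries it waited for the readiness indicator file written by the default network and eventually gave up. A standard-library-only sketch of that kind of poll-until-file-exists wait (the interval and timeout values are illustrative, not multus's actual settings, and this is not the multus daemon's own polling helper):

```go
// Minimal sketch of a poll-until-file-exists wait like the readiness-indicator
// check that timed out above. Interval and timeout are illustrative only.
package main

import (
	"errors"
	"fmt"
	"os"
	"time"
)

func waitForFile(path string, interval, timeout time.Duration) error {
	deadline := time.Now().Add(timeout)
	for {
		if _, err := os.Stat(path); err == nil {
			return nil // indicator file is present; default network is ready
		}
		if time.Now().After(deadline) {
			return errors.New("timed out waiting for the condition")
		}
		time.Sleep(interval)
	}
}

func main() {
	path := "/host/run/multus/cni/net.d/10-ovn-kubernetes.conf" // path taken from the log above
	if err := waitForFile(path, time.Second, 45*time.Second); err != nil {
		fmt.Printf("still waiting for readinessindicatorfile @ %s: %v\n", path, err)
		os.Exit(1)
	}
	fmt.Println("readiness indicator found")
}
```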
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-27T07:52:00Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tz7b2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-rqjpz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:55Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:55 crc kubenswrapper[4787]: I0127 07:52:55.141369 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-fqb6r" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0ff88955-7cd9-4af4-9e0e-79614a1d2994\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://47c65fabdecb2c24a7eeabdff7a89339f711ff64e9cb6516900be57a2753f89b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5e7833b50fe96981aa0ed63aa03b7078def1c809e60092a901a8b7ebaee78ad8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5e7833b50fe96981aa0ed63aa03b7078def1c809e60092a901a8b7ebaee78ad8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:52:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a0b5a15ba402c52fc6f3cd5d882f597c90ea8d1ab4e836f0e0998a301126f5a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a0b5a15ba402c52fc6f3cd5d882f597c90ea8d1ab4e836f0e0998a301126f5a6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:52:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:52:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://82bb64425ded7611030cccfa7a439b14e545bf83bc48c762df144eddc7d08487\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://82bb64425ded7611030cccfa7a439b14e545bf83bc48c762df144eddc7d08487\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:52:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:52:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9782b2a339d2dba1498f28ed3f7e6c4c2e747a282465311592f0fdb5863f823\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f9782b2a339d2dba1498f28ed3f7e6c4c2e747a282465311592f0fdb5863f823\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:52:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:52:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c8924a3ef645447151d91c506d105c9760f0dce0c568e9bf37ea76108a6352b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c8924a3ef645447151d91c506d105c9760f0dce0c568e9bf37ea76108a6352b2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:52:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f0da2ec606517cc9e0c2ed1d549c947db2eecb603f9bc9ba8bddbee0aad4710\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0f0da2ec606517cc9e0c2ed1d549c947db2eecb603f9bc9ba8bddbee0aad4710\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:52:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:52:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-fqb6r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:55Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:55 crc kubenswrapper[4787]: I0127 07:52:55.155454 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:55 crc kubenswrapper[4787]: I0127 07:52:55.156006 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:55 crc 
kubenswrapper[4787]: I0127 07:52:55.156186 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:55 crc kubenswrapper[4787]: I0127 07:52:55.156355 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:55 crc kubenswrapper[4787]: I0127 07:52:55.156483 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:55Z","lastTransitionTime":"2026-01-27T07:52:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:55 crc kubenswrapper[4787]: I0127 07:52:55.187428 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7279787f-4f29-4eae-a213-b7ea5d579b93\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://486c1b541ff6adc6a3cdab348fbdd7fbcf1c202c1a474e5e5db37bbcfb38fe06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://47800c35810b19642ee37e41f20900bef93d843d1d7e2219547dde7bf88cc7dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mou
ntPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://85684c09dd546fc9a972aa45da092b22d794a5588140a451becfbe962be82b76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0aa5c186d3ea5f41540e7450b9ee5a5bbfe8359762ab9aa219a4e4bb26fb0a94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9717409dfdeb0d26d9a1c28a651d1feddfd4aaa11a9bc7db7206fbf8c6400c22\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b0fb183cca1d2f6f6f5bd9c80107e85fe0f5f39ea985a036ba115836e45d7cc2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b0fb183cca1d2f6f6f5bd9c80107e85fe0f5f39ea985a036ba115836e45d7cc2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:51:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\
\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a9bb746c11b2a67e014815e16d6765a1c86a3d2c156cae83853881bdb988507c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a9bb746c11b2a67e014815e16d6765a1c86a3d2c156cae83853881bdb988507c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://1fa78137ccc23e32f6eb838abf3991ca3d99b8349721f1419c892a7f8795f55f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1fa78137ccc23e32f6eb838abf3991ca3d99b8349721f1419c892a7f8795f55f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:51:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:35Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:55Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:55 crc kubenswrapper[4787]: I0127 07:52:55.213849 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7a18f3e6327858658093c392b8b7de18ec6a1085c08ae31ae2f2dc371555011a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:55Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:55 crc kubenswrapper[4787]: I0127 07:52:55.237800 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:55Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:55 crc kubenswrapper[4787]: I0127 07:52:55.256157 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-vws75" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3969f21f-ab36-49b4-9a9c-02cf19e65ad0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:13Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:13Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:13Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq2mg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq2mg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:52:13Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-vws75\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:55Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:55 crc kubenswrapper[4787]: I0127 07:52:55.260717 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:55 crc kubenswrapper[4787]: I0127 07:52:55.260790 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:55 crc kubenswrapper[4787]: I0127 07:52:55.260812 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:55 crc kubenswrapper[4787]: I0127 07:52:55.260840 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:55 crc kubenswrapper[4787]: I0127 07:52:55.260860 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:55Z","lastTransitionTime":"2026-01-27T07:52:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
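Each of these NotReady transitions traces back to the same condition: the runtime reports no network definition in /etc/kubernetes/cni/net.d/. As a rough illustration only (this is not CRI-O's implementation), a check for the conventional CNI configuration extensions in that directory could look like:

```go
// Rough illustration of the "no CNI configuration file" check implied by the
// NetworkReady=false condition above: list the CNI conf directory named in the
// log and report whether any *.conf, *.conflist or *.json file is present.
// The extension list follows CNI convention; this is not CRI-O's code.
package main

import (
	"fmt"
	"os"
	"path/filepath"
)

func cniConfigFiles(dir string) ([]string, error) {
	entries, err := os.ReadDir(dir)
	if err != nil {
		return nil, err
	}
	var found []string
	for _, e := range entries {
		if e.IsDir() {
			continue
		}
		switch filepath.Ext(e.Name()) {
		case ".conf", ".conflist", ".json":
			found = append(found, filepath.Join(dir, e.Name()))
		}
	}
	return found, nil
}

func main() {
	dir := "/etc/kubernetes/cni/net.d" // directory from the kubelet message above
	files, err := cniConfigFiles(dir)
	if err != nil || len(files) == 0 {
		fmt.Printf("no CNI configuration file in %s (err=%v) -- network plugin not ready\n", dir, err)
		return
	}
	fmt.Println("CNI configurations:", files)
}
```

Once the default network controller writes a config file there, the runtime flips NetworkReady to true and the kubelet stops emitting the NodeNotReady events seen above.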
Has your network provider started?"} Jan 27 07:52:55 crc kubenswrapper[4787]: I0127 07:52:55.285296 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7642m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fa44405c-042c-485a-ab6c-912dcd377751\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b5f54aa358ddf7d9fe76fd45c4e3332ba2ef1fa4da77c6f38ae29209c4339a80\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2abb29a77d0081b70495701e68f0a5a9b80656eab59ae8de10fab1450a1df49f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://108cdc389c2cbb9baeb538856e70ea1ca2bce5968eb4097700b3e71e5322258d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0bcc1a1d9f6e5bd0d8cf9b36441460debd839f92291531175eb05db5070136e8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fbc588f712cb0167029fc80f924f52841fb904738bbcf774c53ca15a2642ef9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc1a27dd4120a4eafd6bfbd908f9fdbc80e9b006afafb509399f2b657b129b2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://adb98705453ca5a2191ab57c0255b907d1e1d505962fddc38dd440a1d56d505f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://adb98705453ca5a2191ab57c0255b907d1e1d505962fddc38dd440a1d56d505f\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-27T07:52:30Z\\\",\\\"message\\\":\\\" 6533 obj_retry.go:365] Adding new object: *v1.Pod openshift-network-node-identity/network-node-identity-vrzqb\\\\nF0127 07:52:30.089457 6533 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:30Z is after 2025-08-24T17:21:41Z]\\\\nI0127 07:52:30.089470 6533 obj_retry.go:303] Retry object setup: *v1.Pod openshift-image-registry/node-ca-rqwfc\\\\nI0127 07:52:30.089462 6533 ovn.go:134] Ensuring zone local for Pod openshift-network-node-identity/network-node-identity-vrzqb in node crc\\\\nI0127 07:52:30.089487 6533 obj_retry.go:386] Retry successful for *v1.Pod openshift-network-node-identity/network-node-identity-vrzqb after 0 fai\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-27T07:52:29Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-7642m_openshift-ovn-kubernetes(fa44405c-042c-485a-ab6c-912dcd377751)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d46b5c512f7b14d97a53ce0355a7ceb08370d3e91ccef003b40386a9785df413\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://199ad62dee1b7ba9f8af9b2f3fcd77db94e8ba7dbfe2d011610c47358c17126b\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://199ad62dee1b7ba9f8af9b2f3fcd77db94e8ba7dbfe2d011610c47358c17126b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:52:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:59Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-7642m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:55Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:55 crc kubenswrapper[4787]: I0127 07:52:55.300301 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-vhss5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"643aaef9-e302-436b-943e-940480ef74fc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5cda08199034af06aa5966ebd72a8a553fe0c83acf73bbd42f4f7d6bb121b2e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8q989
\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ea42ae75a943bc89609995381bc82366333131d7c7200a3ab758b9de239e3283\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8q989\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:52:12Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-vhss5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:55Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:55 crc kubenswrapper[4787]: I0127 07:52:55.321145 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-q4fh5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f051e184-acac-47cf-9e04-9df648288715\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cf36b99a3389bdbbca8142539870671f6be61df22503df198f6255937e619950\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zvg7g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8829867d627dfed47b988a93a9ac3e381c0bf59794b65f8e1c1e821838477748\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zvg7g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:59Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-q4fh5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:55Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:55 crc kubenswrapper[4787]: I0127 07:52:55.340243 4787 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6d28aae4-3359-4d7e-8862-8db4b17a3403\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d820bf9fe1c788b0bb17535226d96cd310fc188148091fa8b4186dd43baa5f75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d407da23fc5babf6cec063fb31861fd8ac0f456a44746a83740c61c1e53a436c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0509b185e88f56d56bc6f8429a620d74855d938f1206d6c9193d8e93fc12ae91\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":
[{\\\"containerID\\\":\\\"cri-o://ec120e647d27b70e3066a00046d12258db3162ab9beef75b4079d546c2533aaf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ec120e647d27b70e3066a00046d12258db3162ab9beef75b4079d546c2533aaf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:51:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:35Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:55Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:55 crc kubenswrapper[4787]: I0127 07:52:55.361326 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2974b5846d4b9e95a3eab849f160c5b33393ff6b1bf1275bc6476ec80807d632\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4426d84c4375c7c659c77049bbd6a720bf004fa5e7780b1e8739a2620c3667f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d209948
2919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:55Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:55 crc kubenswrapper[4787]: I0127 07:52:55.363633 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:55 crc kubenswrapper[4787]: I0127 07:52:55.363674 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:55 crc kubenswrapper[4787]: I0127 07:52:55.363686 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:55 crc kubenswrapper[4787]: I0127 07:52:55.363709 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:55 crc kubenswrapper[4787]: I0127 07:52:55.363725 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:55Z","lastTransitionTime":"2026-01-27T07:52:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:52:55 crc kubenswrapper[4787]: I0127 07:52:55.383630 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1d5a64cdfd5f8aff93294f92aebc9ccf8fa2a4063e347fce2e35604d27651d9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:55Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:55 crc kubenswrapper[4787]: I0127 07:52:55.399012 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-fghc7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6cac1bb0-6b6f-442f-9db4-a25d4f194bb1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://825d098d784c9e43aa1c5b9014dd290f1a23ddf0651dfd603c634682a8f05da2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c8zvj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:59Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-fghc7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:55Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:55 crc kubenswrapper[4787]: I0127 07:52:55.414656 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-rqwfc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fe88a860-6bb6-40ef-8ed7-c16f06fad8d3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f0dc7c2423b05f4eb857a5a6d8a5006cea055965d20a85e84659b369f9659eeb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-scmbf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:52:02Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-rqwfc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:55Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:55 crc kubenswrapper[4787]: I0127 07:52:55.431048 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9bef4a57-904d-47b1-baa1-224c5c9f2b1a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://99c0fb5499a4bed619d003da60f83076e413fc5bda47a12d42770f567deeeec8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c114c2274358e777b240765cca81ea54a2e334002317a86595b7bfec95a11fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c470d3edd878f69c48f1457780557dd56300c0fe69342c479f1922dca856bceb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bba5d04b33d62090867842211184f53bc23cf78b90a4c6709792639b74d1cf0d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:35Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:55Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:55 crc kubenswrapper[4787]: I0127 07:52:55.456782 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"60eef58e-b2eb-43d9-a499-317083a89ca3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://35e90c0899d9ae9ea7cce3f5904955f1f3a80e147efbd766e9ed3b34014f40df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://53dd18ffb0f39abccd8edc121977448f4c491f6be9acee866dde8cbeb7c52ab5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c
987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://af5452f3980a8a4614721cb60fffffb3b1f3f5d8d82928249857ed2dd3fb5986\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://215bbc991c576293e40d9ba239e697bfad702383af70483ddf0acea311b4ae28\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://879d855210b326ad3cbb964024c0b48b7373519deae8b974b9f5864416c15ef3\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"message\\\":\\\"le observer\\\\nW0127 07:51:54.984485 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0127 07:51:54.984680 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0127 07:51:54.985504 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2155430209/tls.crt::/tmp/serving-cert-2155430209/tls.key\\\\\\\"\\\\nI0127 07:51:55.207010 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0127 07:51:55.214027 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0127 07:51:55.214056 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0127 07:51:55.214085 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0127 07:51:55.214092 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0127 07:51:55.221404 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0127 07:51:55.221608 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0127 07:51:55.221697 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0127 07:51:55.221768 1 secure_serving.go:69] Use of insecure cipher 
'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0127 07:51:55.221825 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0127 07:51:55.221877 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0127 07:51:55.221926 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0127 07:51:55.221414 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0127 07:51:55.222961 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-27T07:51:49Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f3424e1f77a68fac6a8ccd12e018ce28850817c99e72cef7edbff0941124c46b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c37afd8d2562157729aca3685d7abae2bb6b9e081c2b456706dde875fd762c07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c37afd8d2562157729aca3685d7abae2bb6b9e081c2b456706dde875fd762c07\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:51:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:35Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:55Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:55 crc kubenswrapper[4787]: I0127 07:52:55.467684 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:55 crc kubenswrapper[4787]: I0127 07:52:55.467729 
4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:55 crc kubenswrapper[4787]: I0127 07:52:55.467740 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:55 crc kubenswrapper[4787]: I0127 07:52:55.467756 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:55 crc kubenswrapper[4787]: I0127 07:52:55.467770 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:55Z","lastTransitionTime":"2026-01-27T07:52:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:55 crc kubenswrapper[4787]: I0127 07:52:55.473089 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:55Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:55 crc kubenswrapper[4787]: I0127 07:52:55.570600 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:55 crc kubenswrapper[4787]: I0127 07:52:55.570650 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:55 crc kubenswrapper[4787]: I0127 07:52:55.570659 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:55 crc kubenswrapper[4787]: I0127 07:52:55.570677 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:55 crc kubenswrapper[4787]: I0127 07:52:55.570687 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:55Z","lastTransitionTime":"2026-01-27T07:52:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:55 crc kubenswrapper[4787]: I0127 07:52:55.673429 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:55 crc kubenswrapper[4787]: I0127 07:52:55.673480 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:55 crc kubenswrapper[4787]: I0127 07:52:55.673496 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:55 crc kubenswrapper[4787]: I0127 07:52:55.673516 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:55 crc kubenswrapper[4787]: I0127 07:52:55.673534 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:55Z","lastTransitionTime":"2026-01-27T07:52:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:52:55 crc kubenswrapper[4787]: I0127 07:52:55.776768 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:55 crc kubenswrapper[4787]: I0127 07:52:55.776826 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:55 crc kubenswrapper[4787]: I0127 07:52:55.776841 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:55 crc kubenswrapper[4787]: I0127 07:52:55.776864 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:55 crc kubenswrapper[4787]: I0127 07:52:55.776887 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:55Z","lastTransitionTime":"2026-01-27T07:52:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:55 crc kubenswrapper[4787]: I0127 07:52:55.880190 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:55 crc kubenswrapper[4787]: I0127 07:52:55.880254 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:55 crc kubenswrapper[4787]: I0127 07:52:55.880267 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:55 crc kubenswrapper[4787]: I0127 07:52:55.880290 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:55 crc kubenswrapper[4787]: I0127 07:52:55.880307 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:55Z","lastTransitionTime":"2026-01-27T07:52:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:55 crc kubenswrapper[4787]: I0127 07:52:55.983443 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:55 crc kubenswrapper[4787]: I0127 07:52:55.983502 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:55 crc kubenswrapper[4787]: I0127 07:52:55.983521 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:55 crc kubenswrapper[4787]: I0127 07:52:55.983581 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:55 crc kubenswrapper[4787]: I0127 07:52:55.983602 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:55Z","lastTransitionTime":"2026-01-27T07:52:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:52:56 crc kubenswrapper[4787]: I0127 07:52:56.076763 4787 scope.go:117] "RemoveContainer" containerID="adb98705453ca5a2191ab57c0255b907d1e1d505962fddc38dd440a1d56d505f" Jan 27 07:52:56 crc kubenswrapper[4787]: I0127 07:52:56.080516 4787 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-04 19:41:50.553613017 +0000 UTC Jan 27 07:52:56 crc kubenswrapper[4787]: I0127 07:52:56.086301 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:56 crc kubenswrapper[4787]: I0127 07:52:56.086338 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:56 crc kubenswrapper[4787]: I0127 07:52:56.086347 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:56 crc kubenswrapper[4787]: I0127 07:52:56.086364 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:56 crc kubenswrapper[4787]: I0127 07:52:56.086376 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:56Z","lastTransitionTime":"2026-01-27T07:52:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:56 crc kubenswrapper[4787]: I0127 07:52:56.189137 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:56 crc kubenswrapper[4787]: I0127 07:52:56.189185 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:56 crc kubenswrapper[4787]: I0127 07:52:56.189196 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:56 crc kubenswrapper[4787]: I0127 07:52:56.189213 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:56 crc kubenswrapper[4787]: I0127 07:52:56.189226 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:56Z","lastTransitionTime":"2026-01-27T07:52:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:52:56 crc kubenswrapper[4787]: I0127 07:52:56.293787 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:56 crc kubenswrapper[4787]: I0127 07:52:56.293842 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:56 crc kubenswrapper[4787]: I0127 07:52:56.293853 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:56 crc kubenswrapper[4787]: I0127 07:52:56.293875 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:56 crc kubenswrapper[4787]: I0127 07:52:56.293889 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:56Z","lastTransitionTime":"2026-01-27T07:52:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:56 crc kubenswrapper[4787]: I0127 07:52:56.397995 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:56 crc kubenswrapper[4787]: I0127 07:52:56.398061 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:56 crc kubenswrapper[4787]: I0127 07:52:56.398075 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:56 crc kubenswrapper[4787]: I0127 07:52:56.398098 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:56 crc kubenswrapper[4787]: I0127 07:52:56.398111 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:56Z","lastTransitionTime":"2026-01-27T07:52:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:56 crc kubenswrapper[4787]: I0127 07:52:56.500633 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:56 crc kubenswrapper[4787]: I0127 07:52:56.500687 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:56 crc kubenswrapper[4787]: I0127 07:52:56.500697 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:56 crc kubenswrapper[4787]: I0127 07:52:56.500714 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:56 crc kubenswrapper[4787]: I0127 07:52:56.500726 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:56Z","lastTransitionTime":"2026-01-27T07:52:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:52:56 crc kubenswrapper[4787]: I0127 07:52:56.603692 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:56 crc kubenswrapper[4787]: I0127 07:52:56.603740 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:56 crc kubenswrapper[4787]: I0127 07:52:56.603753 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:56 crc kubenswrapper[4787]: I0127 07:52:56.603772 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:56 crc kubenswrapper[4787]: I0127 07:52:56.603786 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:56Z","lastTransitionTime":"2026-01-27T07:52:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:56 crc kubenswrapper[4787]: I0127 07:52:56.622591 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-7642m_fa44405c-042c-485a-ab6c-912dcd377751/ovnkube-controller/2.log" Jan 27 07:52:56 crc kubenswrapper[4787]: I0127 07:52:56.625815 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7642m" event={"ID":"fa44405c-042c-485a-ab6c-912dcd377751","Type":"ContainerStarted","Data":"6d3ec81938807879c988aabe13c7bf18add843b4295e6a4228405f02b47fd637"} Jan 27 07:52:56 crc kubenswrapper[4787]: I0127 07:52:56.626335 4787 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-7642m" Jan 27 07:52:56 crc kubenswrapper[4787]: I0127 07:52:56.648057 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7279787f-4f29-4eae-a213-b7ea5d579b93\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://486c1b541ff6adc6a3cdab348fbdd7fbcf1c202c1a474e5e5db37bbcfb38fe06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://47800c35810b19642ee37e41f20900bef93d843d1d7e2219547dde7bf88cc7dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://85684c09dd546fc9a972aa45da092b22d794a5588140a451becfbe962be82b76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0aa5c186d3ea5f41540e7450b9ee5a5bbfe8359
762ab9aa219a4e4bb26fb0a94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9717409dfdeb0d26d9a1c28a651d1feddfd4aaa11a9bc7db7206fbf8c6400c22\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b0fb183cca1d2f6f6f5bd9c80107e85fe0f5f39ea985a036ba115836e45d7cc2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b0fb183cca1d2f6f6f5bd9c80107e85fe0f5f39ea985a036ba115836e45d7cc2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:51:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a9bb746c11b2a67e014815e16d6765a1c86a3d2c156cae83853881bdb988507c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a9bb746c11b2a67e014815e16d6765a1c86a3d2c156cae83853881bdb988507c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://1fa78137ccc23e32f6eb838abf3991ca3d99b8349721f1419c892a7f8795f55f\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1fa78137ccc23e32f6eb838abf3991ca3d99b8349721f1419c892a7f8795f55f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:51:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:35Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:56Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:56 crc kubenswrapper[4787]: I0127 07:52:56.662258 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7a18f3e6327858658093c392b8b7de18ec6a1085c08ae31ae2f2dc371555011a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:56Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:56 crc kubenswrapper[4787]: I0127 07:52:56.676416 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:56Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:56 crc kubenswrapper[4787]: I0127 07:52:56.690415 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:56Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:56 crc kubenswrapper[4787]: I0127 07:52:56.705667 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-rqjpz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e6f78168-0b0d-464d-b1c7-00bb9a69c0d1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4924d9f32dbc90c38c70d18db5a32bdabb76d288c34457331804829e451cf169\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e73805a73ffd2c2028fa682daaf20e295ecd2f2d7e24bb9b897883f18c3c514e\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-27T07:52:46Z\\\",\\\"message\\\":\\\"2026-01-27T07:52:01+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to 
/host/opt/cni/bin/upgrade_3d832606-6840-4320-b93f-1be93a343206\\\\n2026-01-27T07:52:01+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_3d832606-6840-4320-b93f-1be93a343206 to /host/opt/cni/bin/\\\\n2026-01-27T07:52:01Z [verbose] multus-daemon started\\\\n2026-01-27T07:52:01Z [verbose] Readiness Indicator file check\\\\n2026-01-27T07:52:46Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-27T07:52:00Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tz7b2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-rqjpz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:56Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:56 crc kubenswrapper[4787]: I0127 07:52:56.706639 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:56 crc kubenswrapper[4787]: I0127 07:52:56.706701 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:56 crc kubenswrapper[4787]: I0127 07:52:56.706714 4787 kubelet_node_status.go:724] "Recording 
event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:56 crc kubenswrapper[4787]: I0127 07:52:56.706734 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:56 crc kubenswrapper[4787]: I0127 07:52:56.706744 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:56Z","lastTransitionTime":"2026-01-27T07:52:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:56 crc kubenswrapper[4787]: I0127 07:52:56.724048 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-fqb6r" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0ff88955-7cd9-4af4-9e0e-79614a1d2994\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://47c65fabdecb2c24a7eeabdff7a89339f711ff64e9cb6516900be57a2753f89b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5e7833b50fe96981aa0ed63aa03b7078def1c809e60092a901a8b7ebaee78ad8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5e7833b50fe96981aa0ed63aa03b7078def1c809e60092a901a8b7ebaee78ad8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:
52:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:52:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a0b5a15ba402c52fc6f3cd5d882f597c90ea8d1ab4e836f0e0998a301126f5a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a0b5a15ba402c52fc6f3cd5d882f597c90ea8d1ab4e836f0e0998a301126f5a6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:52:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:52:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://82bb64425ded7611030cccfa7a439b14e545bf83bc48c762df144eddc7d08487\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://82bb64425ded7611030cccfa7a439b14e545bf83bc48c762df144eddc7d08487\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:52:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:52:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9782b2a339d2dba1498f28ed3f7e6c4c2e747a282465311592f0fdb5863f823\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de
0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f9782b2a339d2dba1498f28ed3f7e6c4c2e747a282465311592f0fdb5863f823\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:52:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:52:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c8924a3ef645447151d91c506d105c9760f0dce0c568e9bf37ea76108a6352b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c8924a3ef645447151d91c506d105c9760f0dce0c568e9bf37ea76108a6352b2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:52:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f0da2ec606517cc9e0c2ed1d549c947db2eecb603f9bc9ba8bddbee0aad4710\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0f0da2ec606517cc9e0c2ed1d549c947db2eecb603f9bc9ba8bddbee0aad4710\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:52:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:52:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],
\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-fqb6r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:56Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:56 crc kubenswrapper[4787]: I0127 07:52:56.744715 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7642m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fa44405c-042c-485a-ab6c-912dcd377751\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b5f54aa358ddf7d9fe76fd45c4e3332ba2ef1fa4da77c6f38ae29209c4339a80\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2abb29a77d0081b70495701e68f0a5a9b80656eab59ae8de10fab1450a1df49f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://108cdc389c2cbb9baeb538856e70ea1ca2bce5968eb4097700b3e71e5322258d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0bcc1a1d9f6e5bd0d8cf9b36441460debd839f92291531175eb05db5070136e8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fbc588f712cb0167029fc80f924f52841fb904738bbcf774c53ca15a2642ef9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc1a27dd4120a4eafd6bfbd908f9fdbc80e9b006afafb509399f2b657b129b2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6d3ec81938807879c988aabe13c7bf18add843b4
295e6a4228405f02b47fd637\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://adb98705453ca5a2191ab57c0255b907d1e1d505962fddc38dd440a1d56d505f\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-27T07:52:30Z\\\",\\\"message\\\":\\\" 6533 obj_retry.go:365] Adding new object: *v1.Pod openshift-network-node-identity/network-node-identity-vrzqb\\\\nF0127 07:52:30.089457 6533 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:30Z is after 2025-08-24T17:21:41Z]\\\\nI0127 07:52:30.089470 6533 obj_retry.go:303] Retry object setup: *v1.Pod openshift-image-registry/node-ca-rqwfc\\\\nI0127 07:52:30.089462 6533 ovn.go:134] Ensuring zone local for Pod openshift-network-node-identity/network-node-identity-vrzqb in node crc\\\\nI0127 07:52:30.089487 6533 obj_retry.go:386] Retry successful for *v1.Pod openshift-network-node-identity/network-node-identity-vrzqb after 0 
fai\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-27T07:52:29Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d46b5c512f7b14d97a53ce0355a7ceb08370d3e91ccef003b40386a9785df413\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"co
ntainerID\\\":\\\"cri-o://199ad62dee1b7ba9f8af9b2f3fcd77db94e8ba7dbfe2d011610c47358c17126b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://199ad62dee1b7ba9f8af9b2f3fcd77db94e8ba7dbfe2d011610c47358c17126b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:52:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:59Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-7642m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:56Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:56 crc kubenswrapper[4787]: I0127 07:52:56.759576 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-vhss5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"643aaef9-e302-436b-943e-940480ef74fc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5cda08199034af06aa5966ebd72a8a553fe0c83acf73bbd42f4f7d6bb121b2e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8q989\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ea42ae75a943bc89609995381bc82366333131d7c7200a3ab758b9de239e3283\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8q989\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:52:12Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-vhss5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:56Z is after 2025-08-24T17:21:41Z" Jan 27 
07:52:56 crc kubenswrapper[4787]: I0127 07:52:56.786708 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-vws75" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3969f21f-ab36-49b4-9a9c-02cf19e65ad0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:13Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:13Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:13Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq2mg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq2mg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:52:13Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-vws75\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:56Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:56 crc kubenswrapper[4787]: I0127 07:52:56.801026 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6d28aae4-3359-4d7e-8862-8db4b17a3403\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d820bf9fe1c788b0bb17535226d96cd310fc188148091fa8b4186dd43baa5f75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d407da23fc5babf6cec063fb31861fd8ac0f456a44746a83740c61c1e53a436c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0509b185e88f56d56bc6f8429a620d74855d938f1206d6c9193d8e93fc12ae91\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec120e647d27b70e3066a00046d12258db3162ab9beef75b4079d546c2533aaf\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ec120e647d27b70e3066a00046d12258db3162ab9beef75b4079d546c2533aaf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:51:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:35Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:56Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:56 crc kubenswrapper[4787]: I0127 07:52:56.808917 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:56 crc kubenswrapper[4787]: I0127 07:52:56.808951 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:56 crc kubenswrapper[4787]: I0127 07:52:56.808962 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:56 crc kubenswrapper[4787]: I0127 07:52:56.808980 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:56 crc kubenswrapper[4787]: I0127 07:52:56.808995 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:56Z","lastTransitionTime":"2026-01-27T07:52:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:52:56 crc kubenswrapper[4787]: I0127 07:52:56.815399 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2974b5846d4b9e95a3eab849f160c5b33393ff6b1bf1275bc6476ec80807d632\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4426d84c4375c7c659c77049bbd6a720bf004fa5e7780b1e8739a2620c3667f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:56Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:56 crc kubenswrapper[4787]: I0127 07:52:56.827564 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1d5a64cdfd5f8aff93294f92aebc9ccf8fa2a4063e347fce2e35604d27651d9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:56Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:56 crc kubenswrapper[4787]: I0127 07:52:56.839754 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-q4fh5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f051e184-acac-47cf-9e04-9df648288715\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cf36b99a3389bdbbca8142539870671f6be61df22503df198f6255937e619950\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zvg7g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8829867d627dfed47b988a93a9ac3e381c0bf59794b65f8e1c1e821838477748\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zvg7g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:59Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-q4fh5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:56Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:56 crc kubenswrapper[4787]: I0127 07:52:56.854708 4787 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9bef4a57-904d-47b1-baa1-224c5c9f2b1a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://99c0fb5499a4bed619d003da60f83076e413fc5bda47a12d42770f567deeeec8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c114c2274358e777b240765cca81ea54a2e334002317a86595b7bfec95a11fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c470d3edd878f69c48f1457780557dd56300c0fe69342c479f1922dca856bceb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bba5d04b33d6209086784221118
4f53bc23cf78b90a4c6709792639b74d1cf0d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:35Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:56Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:56 crc kubenswrapper[4787]: I0127 07:52:56.868527 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"60eef58e-b2eb-43d9-a499-317083a89ca3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://35e90c0899d9ae9ea7cce3f5904955f1f3a80e147efbd766e9ed3b34014f40df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://53dd18ffb0f39abccd8edc121977448f4c491f6be9acee866dde8cbeb7c52ab5\\\",\\\"i
mage\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://af5452f3980a8a4614721cb60fffffb3b1f3f5d8d82928249857ed2dd3fb5986\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://215bbc991c576293e40d9ba239e697bfad702383af70483ddf0acea311b4ae28\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://879d855210b326ad3cbb964024c0b48b7373519deae8b974b9f5864416c15ef3\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"message\\\":\\\"le observer\\\\nW0127 07:51:54.984485 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0127 07:51:54.984680 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0127 07:51:54.985504 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2155430209/tls.crt::/tmp/serving-cert-2155430209/tls.key\\\\\\\"\\\\nI0127 07:51:55.207010 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0127 07:51:55.214027 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0127 07:51:55.214056 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0127 07:51:55.214085 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0127 07:51:55.214092 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0127 07:51:55.221404 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0127 07:51:55.221608 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0127 07:51:55.221697 1 secure_serving.go:69] Use of insecure cipher 
'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0127 07:51:55.221768 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0127 07:51:55.221825 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0127 07:51:55.221877 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0127 07:51:55.221926 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0127 07:51:55.221414 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0127 07:51:55.222961 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-27T07:51:49Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f3424e1f77a68fac6a8ccd12e018ce28850817c99e72cef7edbff0941124c46b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c37afd8d2562157729aca3685d7abae2bb6b9e081c2b456706dde875fd762c07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c37afd8d2562157729aca3685d7abae2bb6b9e081c2b456706dde875fd762c07\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:51:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:35Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:56Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:56 crc kubenswrapper[4787]: I0127 07:52:56.882714 4787 status_manager.go:875] "Failed to update status 
for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:56Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:56 crc kubenswrapper[4787]: I0127 07:52:56.895385 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-fghc7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6cac1bb0-6b6f-442f-9db4-a25d4f194bb1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://825d098d784c9e43aa1c5b9014dd290f1a23ddf0651dfd603c634682a8f05da2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c8zvj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:59Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-fghc7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:56Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:56 crc kubenswrapper[4787]: I0127 07:52:56.905857 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-rqwfc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fe88a860-6bb6-40ef-8ed7-c16f06fad8d3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f0dc7c2423b05f4eb857a5a6d8a5006cea055965d20a85e84659b369f9659eeb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-scmbf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:52:02Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-rqwfc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:56Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:56 crc kubenswrapper[4787]: I0127 07:52:56.911576 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:56 crc kubenswrapper[4787]: I0127 07:52:56.911629 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:56 crc kubenswrapper[4787]: I0127 07:52:56.911642 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:56 crc kubenswrapper[4787]: I0127 07:52:56.911661 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:56 crc kubenswrapper[4787]: I0127 07:52:56.911673 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:56Z","lastTransitionTime":"2026-01-27T07:52:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:57 crc kubenswrapper[4787]: I0127 07:52:57.015026 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:57 crc kubenswrapper[4787]: I0127 07:52:57.015075 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:57 crc kubenswrapper[4787]: I0127 07:52:57.015086 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:57 crc kubenswrapper[4787]: I0127 07:52:57.015104 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:57 crc kubenswrapper[4787]: I0127 07:52:57.015115 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:57Z","lastTransitionTime":"2026-01-27T07:52:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:57 crc kubenswrapper[4787]: I0127 07:52:57.076751 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-vws75" Jan 27 07:52:57 crc kubenswrapper[4787]: I0127 07:52:57.076775 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 27 07:52:57 crc kubenswrapper[4787]: I0127 07:52:57.076823 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 27 07:52:57 crc kubenswrapper[4787]: I0127 07:52:57.076898 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 27 07:52:57 crc kubenswrapper[4787]: E0127 07:52:57.076931 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 27 07:52:57 crc kubenswrapper[4787]: E0127 07:52:57.076963 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 27 07:52:57 crc kubenswrapper[4787]: E0127 07:52:57.077146 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 27 07:52:57 crc kubenswrapper[4787]: E0127 07:52:57.077245 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-vws75" podUID="3969f21f-ab36-49b4-9a9c-02cf19e65ad0" Jan 27 07:52:57 crc kubenswrapper[4787]: I0127 07:52:57.081615 4787 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-11 04:32:18.268703556 +0000 UTC Jan 27 07:52:57 crc kubenswrapper[4787]: I0127 07:52:57.117465 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:57 crc kubenswrapper[4787]: I0127 07:52:57.117525 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:57 crc kubenswrapper[4787]: I0127 07:52:57.117599 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:57 crc kubenswrapper[4787]: I0127 07:52:57.117631 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:57 crc kubenswrapper[4787]: I0127 07:52:57.117652 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:57Z","lastTransitionTime":"2026-01-27T07:52:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:57 crc kubenswrapper[4787]: I0127 07:52:57.221579 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:57 crc kubenswrapper[4787]: I0127 07:52:57.221652 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:57 crc kubenswrapper[4787]: I0127 07:52:57.221673 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:57 crc kubenswrapper[4787]: I0127 07:52:57.221708 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:57 crc kubenswrapper[4787]: I0127 07:52:57.221734 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:57Z","lastTransitionTime":"2026-01-27T07:52:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:52:57 crc kubenswrapper[4787]: I0127 07:52:57.325691 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:57 crc kubenswrapper[4787]: I0127 07:52:57.325772 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:57 crc kubenswrapper[4787]: I0127 07:52:57.325793 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:57 crc kubenswrapper[4787]: I0127 07:52:57.325825 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:57 crc kubenswrapper[4787]: I0127 07:52:57.325845 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:57Z","lastTransitionTime":"2026-01-27T07:52:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:57 crc kubenswrapper[4787]: I0127 07:52:57.429536 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:57 crc kubenswrapper[4787]: I0127 07:52:57.429614 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:57 crc kubenswrapper[4787]: I0127 07:52:57.429636 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:57 crc kubenswrapper[4787]: I0127 07:52:57.429662 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:57 crc kubenswrapper[4787]: I0127 07:52:57.429680 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:57Z","lastTransitionTime":"2026-01-27T07:52:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:57 crc kubenswrapper[4787]: I0127 07:52:57.532737 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:57 crc kubenswrapper[4787]: I0127 07:52:57.532804 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:57 crc kubenswrapper[4787]: I0127 07:52:57.532818 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:57 crc kubenswrapper[4787]: I0127 07:52:57.532839 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:57 crc kubenswrapper[4787]: I0127 07:52:57.532852 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:57Z","lastTransitionTime":"2026-01-27T07:52:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:52:57 crc kubenswrapper[4787]: I0127 07:52:57.633138 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-7642m_fa44405c-042c-485a-ab6c-912dcd377751/ovnkube-controller/3.log" Jan 27 07:52:57 crc kubenswrapper[4787]: I0127 07:52:57.633959 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-7642m_fa44405c-042c-485a-ab6c-912dcd377751/ovnkube-controller/2.log" Jan 27 07:52:57 crc kubenswrapper[4787]: I0127 07:52:57.635312 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:57 crc kubenswrapper[4787]: I0127 07:52:57.635423 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:57 crc kubenswrapper[4787]: I0127 07:52:57.635444 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:57 crc kubenswrapper[4787]: I0127 07:52:57.635467 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:57 crc kubenswrapper[4787]: I0127 07:52:57.635513 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:57Z","lastTransitionTime":"2026-01-27T07:52:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:57 crc kubenswrapper[4787]: I0127 07:52:57.638047 4787 generic.go:334] "Generic (PLEG): container finished" podID="fa44405c-042c-485a-ab6c-912dcd377751" containerID="6d3ec81938807879c988aabe13c7bf18add843b4295e6a4228405f02b47fd637" exitCode=1 Jan 27 07:52:57 crc kubenswrapper[4787]: I0127 07:52:57.638136 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7642m" event={"ID":"fa44405c-042c-485a-ab6c-912dcd377751","Type":"ContainerDied","Data":"6d3ec81938807879c988aabe13c7bf18add843b4295e6a4228405f02b47fd637"} Jan 27 07:52:57 crc kubenswrapper[4787]: I0127 07:52:57.638214 4787 scope.go:117] "RemoveContainer" containerID="adb98705453ca5a2191ab57c0255b907d1e1d505962fddc38dd440a1d56d505f" Jan 27 07:52:57 crc kubenswrapper[4787]: I0127 07:52:57.639219 4787 scope.go:117] "RemoveContainer" containerID="6d3ec81938807879c988aabe13c7bf18add843b4295e6a4228405f02b47fd637" Jan 27 07:52:57 crc kubenswrapper[4787]: E0127 07:52:57.639491 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-7642m_openshift-ovn-kubernetes(fa44405c-042c-485a-ab6c-912dcd377751)\"" pod="openshift-ovn-kubernetes/ovnkube-node-7642m" podUID="fa44405c-042c-485a-ab6c-912dcd377751" Jan 27 07:52:57 crc kubenswrapper[4787]: I0127 07:52:57.664115 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9bef4a57-904d-47b1-baa1-224c5c9f2b1a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://99c0fb5499a4bed619d003da60f83076e413fc5bda47a12d42770f567deeeec8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c114c2274358e777b240765cca81ea54a2e334002317a86595b7bfec95a11fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c470d3edd878f69c48f1457780557dd56300c0fe69342c479f1922dca856bceb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bba5d04b33d62090867842211184f53bc23cf78b90a4c6709792639b74d1cf0d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:35Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:57Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:57 crc kubenswrapper[4787]: I0127 07:52:57.683817 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"60eef58e-b2eb-43d9-a499-317083a89ca3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://35e90c0899d9ae9ea7cce3f5904955f1f3a80e147efbd766e9ed3b34014f40df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://53dd18ffb0f39abccd8edc121977448f4c491f6be9acee866dde8cbeb7c52ab5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c
987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://af5452f3980a8a4614721cb60fffffb3b1f3f5d8d82928249857ed2dd3fb5986\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://215bbc991c576293e40d9ba239e697bfad702383af70483ddf0acea311b4ae28\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://879d855210b326ad3cbb964024c0b48b7373519deae8b974b9f5864416c15ef3\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"message\\\":\\\"le observer\\\\nW0127 07:51:54.984485 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0127 07:51:54.984680 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0127 07:51:54.985504 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2155430209/tls.crt::/tmp/serving-cert-2155430209/tls.key\\\\\\\"\\\\nI0127 07:51:55.207010 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0127 07:51:55.214027 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0127 07:51:55.214056 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0127 07:51:55.214085 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0127 07:51:55.214092 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0127 07:51:55.221404 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0127 07:51:55.221608 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0127 07:51:55.221697 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0127 07:51:55.221768 1 secure_serving.go:69] Use of insecure cipher 
'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0127 07:51:55.221825 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0127 07:51:55.221877 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0127 07:51:55.221926 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0127 07:51:55.221414 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0127 07:51:55.222961 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-27T07:51:49Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f3424e1f77a68fac6a8ccd12e018ce28850817c99e72cef7edbff0941124c46b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c37afd8d2562157729aca3685d7abae2bb6b9e081c2b456706dde875fd762c07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c37afd8d2562157729aca3685d7abae2bb6b9e081c2b456706dde875fd762c07\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:51:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:35Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:57Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:57 crc kubenswrapper[4787]: I0127 07:52:57.700727 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:57Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:57 crc kubenswrapper[4787]: I0127 07:52:57.713874 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-fghc7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6cac1bb0-6b6f-442f-9db4-a25d4f194bb1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://825d098d784c9e43aa1c5b9014dd290f1a23ddf0651dfd603c634682a8f05da2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c8zvj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:59Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-fghc7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:57Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:57 crc kubenswrapper[4787]: I0127 07:52:57.724087 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-rqwfc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fe88a860-6bb6-40ef-8ed7-c16f06fad8d3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f0dc7c2423b05f4eb857a5a6d8a5006cea055965d20a85e84659b369f9659eeb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-scmbf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:52:02Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-rqwfc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:57Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:57 crc kubenswrapper[4787]: I0127 07:52:57.746565 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:57 crc kubenswrapper[4787]: I0127 07:52:57.746614 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:57 crc kubenswrapper[4787]: I0127 07:52:57.746624 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:57 crc kubenswrapper[4787]: I0127 07:52:57.746641 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:57 crc kubenswrapper[4787]: I0127 07:52:57.746652 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:57Z","lastTransitionTime":"2026-01-27T07:52:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:57 crc kubenswrapper[4787]: I0127 07:52:57.749615 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7279787f-4f29-4eae-a213-b7ea5d579b93\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://486c1b541ff6adc6a3cdab348fbdd7fbcf1c202c1a474e5e5db37bbcfb38fe06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://47800c35810b19642ee37e41f20900bef93d843d1d7e2219547dde7bf88cc7dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://85684c09dd546fc9a972aa45da092b22d794a5588140a451becfbe962be82b76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":
0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0aa5c186d3ea5f41540e7450b9ee5a5bbfe8359762ab9aa219a4e4bb26fb0a94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9717409dfdeb0d26d9a1c28a651d1feddfd4aaa11a9bc7db7206fbf8c6400c22\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b0fb183cca1d2f6f6f5bd9c80107e85fe0f5f39ea985a036ba115836e45d7cc2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b0fb183cca1d2f6f6f5bd9c80107e85fe0f5f39ea985a036ba115836e45d7cc2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:51:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a9bb746c11b2a67e014815e16d6765a1c86a3d2c156cae83853881bdb988507c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"termi
nated\\\":{\\\"containerID\\\":\\\"cri-o://a9bb746c11b2a67e014815e16d6765a1c86a3d2c156cae83853881bdb988507c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://1fa78137ccc23e32f6eb838abf3991ca3d99b8349721f1419c892a7f8795f55f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1fa78137ccc23e32f6eb838abf3991ca3d99b8349721f1419c892a7f8795f55f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:51:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:35Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:57Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:57 crc kubenswrapper[4787]: I0127 07:52:57.763866 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7a18f3e6327858658093c392b8b7de18ec6a1085c08ae31ae2f2dc371555011a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:57Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:57 crc kubenswrapper[4787]: I0127 07:52:57.777525 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:57Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:57 crc kubenswrapper[4787]: I0127 07:52:57.793537 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:57Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:57 crc kubenswrapper[4787]: I0127 07:52:57.810476 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-rqjpz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e6f78168-0b0d-464d-b1c7-00bb9a69c0d1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4924d9f32dbc90c38c70d18db5a32bdabb76d288c34457331804829e451cf169\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e73805a73ffd2c2028fa682daaf20e295ecd2f2d7e24bb9b897883f18c3c514e\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-27T07:52:46Z\\\",\\\"message\\\":\\\"2026-01-27T07:52:01+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_3d832606-6840-4320-b93f-1be93a343206\\\\n2026-01-27T07:52:01+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_3d832606-6840-4320-b93f-1be93a343206 to /host/opt/cni/bin/\\\\n2026-01-27T07:52:01Z [verbose] multus-daemon started\\\\n2026-01-27T07:52:01Z [verbose] Readiness Indicator file check\\\\n2026-01-27T07:52:46Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-27T07:52:00Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tz7b2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-rqjpz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:57Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:57 crc kubenswrapper[4787]: I0127 07:52:57.826772 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-fqb6r" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0ff88955-7cd9-4af4-9e0e-79614a1d2994\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://47c65fabdecb2c24a7eeabdff7a89339f711ff64e9cb6516900be57a2753f89b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5e7833b50fe96981aa0ed63aa03b7078def1c809e60092a901a8b7ebaee78ad8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5e7833b50fe96981aa0ed63aa03b7078def1c809e60092a901a8b7ebaee78ad8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:52:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a0b5a15ba402c52fc6f3cd5d882f597c90ea8d1ab4e836f0e0998a301126f5a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a0b5a15ba402c52fc6f3cd5d882f597c90ea8d1ab4e836f0e0998a301126f5a6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:52:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:52:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://82bb64425ded7611030cccfa7a439b14e545bf83bc48c762df144eddc7d08487\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://82bb64425ded7611030cccfa7a439b14e545bf83bc48c762df144eddc7d08487\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:52:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:52:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9782b2a339d2dba1498f28ed3f7e6c4c2e747a282465311592f0fdb5863f823\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f9782b2a339d2dba1498f28ed3f7e6c4c2e747a282465311592f0fdb5863f823\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:52:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:52:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c8924a3ef645447151d91c506d105c9760f0dce0c568e9bf37ea76108a6352b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c8924a3ef645447151d91c506d105c9760f0dce0c568e9bf37ea76108a6352b2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:52:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f0da2ec606517cc9e0c2ed1d549c947db2eecb603f9bc9ba8bddbee0aad4710\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0f0da2ec606517cc9e0c2ed1d549c947db2eecb603f9bc9ba8bddbee0aad4710\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:52:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:52:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-fqb6r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:57Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:57 crc kubenswrapper[4787]: I0127 07:52:57.849611 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7642m" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fa44405c-042c-485a-ab6c-912dcd377751\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b5f54aa358ddf7d9fe76fd45c4e3332ba2ef1fa4da77c6f38ae29209c4339a80\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2abb29a77d0081b70495701e68f0a5a9b80656eab59ae8de10fab1450a1df49f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://108cdc389c2cbb9baeb538856e70ea1ca2bce5968eb4097700b3e71e5322258d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0bcc1a1d9f6e5bd0d8cf9b36441460debd839f92291531175eb05db5070136e8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fbc588f712cb0167029fc80f924f52841fb904738bbcf774c53ca15a2642ef9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc1a27dd4120a4eafd6bfbd908f9fdbc80e9b006afafb509399f2b657b129b2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6d3ec81938807879c988aabe13c7bf18add843b4295e6a4228405f02b47fd637\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://adb98705453ca5a2191ab57c0255b907d1e1d505962fddc38dd440a1d56d505f\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-27T07:52:30Z\\\",\\\"message\\\":\\\" 6533 obj_retry.go:365] Adding new object: *v1.Pod openshift-network-node-identity/network-node-identity-vrzqb\\\\nF0127 07:52:30.089457 6533 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:30Z is after 2025-08-24T17:21:41Z]\\\\nI0127 07:52:30.089470 6533 obj_retry.go:303] Retry object setup: *v1.Pod openshift-image-registry/node-ca-rqwfc\\\\nI0127 07:52:30.089462 6533 ovn.go:134] Ensuring zone local for Pod openshift-network-node-identity/network-node-identity-vrzqb in node crc\\\\nI0127 07:52:30.089487 6533 obj_retry.go:386] Retry successful for *v1.Pod openshift-network-node-identity/network-node-identity-vrzqb after 0 fai\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-27T07:52:29Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6d3ec81938807879c988aabe13c7bf18add843b4295e6a4228405f02b47fd637\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-27T07:52:57Z\\\",\\\"message\\\":\\\"57 loadbalancer.go:304] Deleted 0 stale LBs for map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", 
\\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-marketplace/community-operators\\\\\\\"}\\\\nI0127 07:52:57.037280 6957 lb_config.go:1031] Cluster endpoints for openshift-image-registry/image-registry for network=default are: map[]\\\\nI0127 07:52:57.037300 6957 loadbalancer.go:304] Deleted 0 stale LBs for map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-machine-api/cluster-autoscaler-operator\\\\\\\"}\\\\nI0127 07:52:57.037235 6957 loadbalancer.go:304] Deleted 0 stale LBs for map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-oauth-apiserver/api\\\\\\\"}\\\\nI0127 07:52:57.037326 6957 services_controller.go:360] Finished syncing service cluster-autoscaler-operator on namespace openshift-machine-api for network=default : 4.740846ms\\\\nI0127 07:52:57.037332 6957 services_controller.go:360] Finished syncing service api on namespace openshift-oauth-apiserver for network=default : 1.998381ms\\\\nI0127 07:52:57.037373 6957 services_controller.go:356] Processing sync for service openshift-ingress-canary/ingress-canary for network=default\\\\nF0127 07:52:57.037046 6957 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: failed to add event handler: handler {0x1e60340 0x1e6002\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-27T07:52:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d46b5c512f7b14d
97a53ce0355a7ceb08370d3e91ccef003b40386a9785df413\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://199ad62dee1b7ba9f8af9b2f3fcd77db94e8ba7dbfe2d011610c47358c17126b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://199ad62dee1b7ba9f8af9b2f3fcd77db94e8ba7dbfe2d011610c47358c17126b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:52:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:59Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-7642m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:57Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:57 crc kubenswrapper[4787]: I0127 07:52:57.850063 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:57 crc kubenswrapper[4787]: I0127 07:52:57.850158 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:57 crc kubenswrapper[4787]: I0127 07:52:57.850188 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:57 crc kubenswrapper[4787]: I0127 07:52:57.850217 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:57 crc 
kubenswrapper[4787]: I0127 07:52:57.850237 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:57Z","lastTransitionTime":"2026-01-27T07:52:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:57 crc kubenswrapper[4787]: I0127 07:52:57.866034 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-vhss5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"643aaef9-e302-436b-943e-940480ef74fc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5cda08199034af06aa5966ebd72a8a553fe0c83acf73bbd42f4f7d6bb121b2e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8q989\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ea42ae75a943bc89609995381bc82366333131d7c7200a3ab758b9de239e3283\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8q989\\\",\\\"readOnly\\\":true,\\\"recur
siveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:52:12Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-vhss5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:57Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:57 crc kubenswrapper[4787]: I0127 07:52:57.884095 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-vws75" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3969f21f-ab36-49b4-9a9c-02cf19e65ad0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:13Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:13Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:13Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq2mg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq2mg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:52:13Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-vws75\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:57Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:57 crc kubenswrapper[4787]: I0127 07:52:57.901040 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6d28aae4-3359-4d7e-8862-8db4b17a3403\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d820bf9fe1c788b0bb17535226d96cd310fc188148091fa8b4186dd43baa5f75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d407da23fc5babf6cec063fb31861fd8ac0f456a44746a83740c61c1e53a436c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0509b185e88f56d56bc6f8429a620d74855d938f1206d6c9193d8e93fc12ae91\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec120e647d27b70e3066a00046d12258db3162ab9beef75b4079d546c2533aaf\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ec120e647d27b70e3066a00046d12258db3162ab9beef75b4079d546c2533aaf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:51:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:35Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:57Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:57 crc kubenswrapper[4787]: I0127 07:52:57.917607 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2974b5846d4b9e95a3eab849f160c5b33393ff6b1bf1275bc6476ec80807d632\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4426d84c4375c7c659c77049bbd6a720bf004fa5e7780b1e8739a2620c3667f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":t
rue,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:57Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:57 crc kubenswrapper[4787]: I0127 07:52:57.932014 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1d5a64cdfd5f8aff93294f92aebc9ccf8fa2a4063e347fce2e35604d27651d9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:57Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:57 crc kubenswrapper[4787]: I0127 07:52:57.948727 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-q4fh5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f051e184-acac-47cf-9e04-9df648288715\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cf36b99a3389bdbbca8142539870671f6be61df22503df198f6255937e619950\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zvg7g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8829867d627dfed47b988a93a9ac3e381c0bf59794b65f8e1c1e821838477748\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zvg7g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:59Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-q4fh5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:57Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:57 crc kubenswrapper[4787]: I0127 07:52:57.954176 4787 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:57 crc kubenswrapper[4787]: I0127 07:52:57.954241 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:57 crc kubenswrapper[4787]: I0127 07:52:57.954261 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:57 crc kubenswrapper[4787]: I0127 07:52:57.954284 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:57 crc kubenswrapper[4787]: I0127 07:52:57.954301 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:57Z","lastTransitionTime":"2026-01-27T07:52:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:58 crc kubenswrapper[4787]: I0127 07:52:58.058043 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:58 crc kubenswrapper[4787]: I0127 07:52:58.058124 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:58 crc kubenswrapper[4787]: I0127 07:52:58.058144 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:58 crc kubenswrapper[4787]: I0127 07:52:58.058179 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:58 crc kubenswrapper[4787]: I0127 07:52:58.058199 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:58Z","lastTransitionTime":"2026-01-27T07:52:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:52:58 crc kubenswrapper[4787]: I0127 07:52:58.081847 4787 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-17 16:47:40.592824393 +0000 UTC Jan 27 07:52:58 crc kubenswrapper[4787]: I0127 07:52:58.160523 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:58 crc kubenswrapper[4787]: I0127 07:52:58.160584 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:58 crc kubenswrapper[4787]: I0127 07:52:58.160598 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:58 crc kubenswrapper[4787]: I0127 07:52:58.160622 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:58 crc kubenswrapper[4787]: I0127 07:52:58.160639 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:58Z","lastTransitionTime":"2026-01-27T07:52:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:58 crc kubenswrapper[4787]: I0127 07:52:58.263896 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:58 crc kubenswrapper[4787]: I0127 07:52:58.263954 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:58 crc kubenswrapper[4787]: I0127 07:52:58.263969 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:58 crc kubenswrapper[4787]: I0127 07:52:58.263988 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:58 crc kubenswrapper[4787]: I0127 07:52:58.264003 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:58Z","lastTransitionTime":"2026-01-27T07:52:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:52:58 crc kubenswrapper[4787]: I0127 07:52:58.367830 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:58 crc kubenswrapper[4787]: I0127 07:52:58.367884 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:58 crc kubenswrapper[4787]: I0127 07:52:58.367900 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:58 crc kubenswrapper[4787]: I0127 07:52:58.367923 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:58 crc kubenswrapper[4787]: I0127 07:52:58.367937 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:58Z","lastTransitionTime":"2026-01-27T07:52:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:58 crc kubenswrapper[4787]: I0127 07:52:58.470768 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:58 crc kubenswrapper[4787]: I0127 07:52:58.470851 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:58 crc kubenswrapper[4787]: I0127 07:52:58.470875 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:58 crc kubenswrapper[4787]: I0127 07:52:58.470912 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:58 crc kubenswrapper[4787]: I0127 07:52:58.470932 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:58Z","lastTransitionTime":"2026-01-27T07:52:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:58 crc kubenswrapper[4787]: I0127 07:52:58.574940 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:58 crc kubenswrapper[4787]: I0127 07:52:58.574984 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:58 crc kubenswrapper[4787]: I0127 07:52:58.574999 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:58 crc kubenswrapper[4787]: I0127 07:52:58.575017 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:58 crc kubenswrapper[4787]: I0127 07:52:58.575027 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:58Z","lastTransitionTime":"2026-01-27T07:52:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:52:58 crc kubenswrapper[4787]: I0127 07:52:58.643622 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-7642m_fa44405c-042c-485a-ab6c-912dcd377751/ovnkube-controller/3.log" Jan 27 07:52:58 crc kubenswrapper[4787]: I0127 07:52:58.648069 4787 scope.go:117] "RemoveContainer" containerID="6d3ec81938807879c988aabe13c7bf18add843b4295e6a4228405f02b47fd637" Jan 27 07:52:58 crc kubenswrapper[4787]: E0127 07:52:58.648263 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-7642m_openshift-ovn-kubernetes(fa44405c-042c-485a-ab6c-912dcd377751)\"" pod="openshift-ovn-kubernetes/ovnkube-node-7642m" podUID="fa44405c-042c-485a-ab6c-912dcd377751" Jan 27 07:52:58 crc kubenswrapper[4787]: I0127 07:52:58.667903 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6d28aae4-3359-4d7e-8862-8db4b17a3403\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d820bf9fe1c788b0bb17535226d96cd310fc188148091fa8b4186dd43baa5f75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d407da23fc5babf6cec063fb31861fd8ac0f456a44746a83740c61c1e53a436c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kuberne
tes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0509b185e88f56d56bc6f8429a620d74855d938f1206d6c9193d8e93fc12ae91\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec120e647d27b70e3066a00046d12258db3162ab9beef75b4079d546c2533aaf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ec120e647d27b70e3066a00046d12258db3162ab9beef75b4079d546c2533aaf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:51:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:35Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:58Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:58 crc kubenswrapper[4787]: I0127 07:52:58.678292 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:58 crc kubenswrapper[4787]: I0127 07:52:58.678327 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:58 crc kubenswrapper[4787]: I0127 07:52:58.678339 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:58 crc kubenswrapper[4787]: I0127 07:52:58.678362 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:58 crc kubenswrapper[4787]: I0127 07:52:58.678375 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:58Z","lastTransitionTime":"2026-01-27T07:52:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:52:58 crc kubenswrapper[4787]: I0127 07:52:58.685219 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2974b5846d4b9e95a3eab849f160c5b33393ff6b1bf1275bc6476ec80807d632\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4426d84c4375c7c659c77049bbd6a720bf004fa5e7780b1e8739a2620c3667f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:58Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:58 crc kubenswrapper[4787]: I0127 07:52:58.698893 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1d5a64cdfd5f8aff93294f92aebc9ccf8fa2a4063e347fce2e35604d27651d9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:58Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:58 crc kubenswrapper[4787]: I0127 07:52:58.710325 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-q4fh5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f051e184-acac-47cf-9e04-9df648288715\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cf36b99a3389bdbbca8142539870671f6be61df22503df198f6255937e619950\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zvg7g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8829867d627dfed47b988a93a9ac3e381c0bf59794b65f8e1c1e821838477748\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zvg7g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:59Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-q4fh5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:58Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:58 crc kubenswrapper[4787]: I0127 07:52:58.721680 4787 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-image-registry/node-ca-rqwfc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fe88a860-6bb6-40ef-8ed7-c16f06fad8d3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f0dc7c2423b05f4eb857a5a6d8a5006cea055965d20a85e84659b369f9659eeb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-scmbf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:52:02Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-rqwfc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:58Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:58 crc kubenswrapper[4787]: I0127 07:52:58.734130 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9bef4a57-904d-47b1-baa1-224c5c9f2b1a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://99c0fb5499a4bed619d003da60f83076e413fc5bda47a12d42770f567deeeec8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c114c2274358e777b240765cca81ea54a2e334002317a86595b7bfec95a11fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c470d3edd878f69c48f1457780557dd56300c0fe69342c479f1922dca856bceb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bba5d04b33d62090867842211184f53bc23cf78b90a4c6709792639b74d1cf0d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:35Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:58Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:58 crc kubenswrapper[4787]: I0127 07:52:58.746765 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"60eef58e-b2eb-43d9-a499-317083a89ca3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://35e90c0899d9ae9ea7cce3f5904955f1f3a80e147efbd766e9ed3b34014f40df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://53dd18ffb0f39abccd8edc121977448f4c491f6be9acee866dde8cbeb7c52ab5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c
987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://af5452f3980a8a4614721cb60fffffb3b1f3f5d8d82928249857ed2dd3fb5986\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://215bbc991c576293e40d9ba239e697bfad702383af70483ddf0acea311b4ae28\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://879d855210b326ad3cbb964024c0b48b7373519deae8b974b9f5864416c15ef3\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"message\\\":\\\"le observer\\\\nW0127 07:51:54.984485 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0127 07:51:54.984680 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0127 07:51:54.985504 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2155430209/tls.crt::/tmp/serving-cert-2155430209/tls.key\\\\\\\"\\\\nI0127 07:51:55.207010 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0127 07:51:55.214027 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0127 07:51:55.214056 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0127 07:51:55.214085 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0127 07:51:55.214092 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0127 07:51:55.221404 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0127 07:51:55.221608 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0127 07:51:55.221697 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0127 07:51:55.221768 1 secure_serving.go:69] Use of insecure cipher 
'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0127 07:51:55.221825 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0127 07:51:55.221877 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0127 07:51:55.221926 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0127 07:51:55.221414 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0127 07:51:55.222961 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-27T07:51:49Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f3424e1f77a68fac6a8ccd12e018ce28850817c99e72cef7edbff0941124c46b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c37afd8d2562157729aca3685d7abae2bb6b9e081c2b456706dde875fd762c07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c37afd8d2562157729aca3685d7abae2bb6b9e081c2b456706dde875fd762c07\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:51:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:35Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:58Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:58 crc kubenswrapper[4787]: I0127 07:52:58.758436 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:58Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:58 crc kubenswrapper[4787]: I0127 07:52:58.768408 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-fghc7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6cac1bb0-6b6f-442f-9db4-a25d4f194bb1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://825d098d784c9e43aa1c5b9014dd290f1a23ddf0651dfd603c634682a8f05da2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c8zvj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:59Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-fghc7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:58Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:58 crc kubenswrapper[4787]: I0127 07:52:58.782472 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:58 crc kubenswrapper[4787]: I0127 07:52:58.782522 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:58 crc kubenswrapper[4787]: I0127 07:52:58.782537 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:58 crc kubenswrapper[4787]: I0127 07:52:58.782577 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:58 crc kubenswrapper[4787]: I0127 07:52:58.782593 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:58Z","lastTransitionTime":"2026-01-27T07:52:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:58 crc kubenswrapper[4787]: I0127 07:52:58.787629 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-rqjpz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e6f78168-0b0d-464d-b1c7-00bb9a69c0d1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4924d9f32dbc90c38c70d18db5a32bdabb76d288c34457331804829e451cf169\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e73805a73ffd2c2028fa682daaf20e295ecd2f2d7e24bb9b897883f18c3c514e\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-27T07:52:46Z\\\",\\\"message\\\":\\\"2026-01-27T07:52:01+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_3d832606-6840-4320-b93f-1be93a343206\\\\n2026-01-27T07:52:01+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_3d832606-6840-4320-b93f-1be93a343206 to /host/opt/cni/bin/\\\\n2026-01-27T07:52:01Z [verbose] multus-daemon started\\\\n2026-01-27T07:52:01Z [verbose] Readiness Indicator file check\\\\n2026-01-27T07:52:46Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
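Two conditions drive the failures in this stretch of the log: the node is flagged NotReady because no CNI configuration file exists in /etc/kubernetes/cni/net.d/, and kube-multus restarts because the OVN-Kubernetes readiness-indicator file at /host/run/multus/cni/net.d/10-ovn-kubernetes.conf never appeared. Below is a minimal sketch of an equivalent probe in Python; the paths are taken from the messages above, and this is only an illustrative check, not the kubelet's or multus's own readiness logic.

import glob
import os

# Paths quoted in the kubelet and multus messages above.
CNI_CONF_DIR = "/etc/kubernetes/cni/net.d"
OVN_READINESS_INDICATOR = "/host/run/multus/cni/net.d/10-ovn-kubernetes.conf"

def cni_config_present(conf_dir: str = CNI_CONF_DIR) -> bool:
    """True once at least one CNI config (.conf/.conflist/.json) has been written."""
    return any(
        glob.glob(os.path.join(conf_dir, pattern))
        for pattern in ("*.conf", "*.conflist", "*.json")
    )

if __name__ == "__main__":
    print("CNI config present:          ", cni_config_present())
    print("OVN readiness indicator seen:", os.path.isfile(OVN_READINESS_INDICATOR))

Both checks stay false until the OVN-Kubernetes node components manage to write that configuration, which is why the NotReady condition keeps being re-recorded below.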
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-27T07:52:00Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tz7b2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-rqjpz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:58Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:58 crc kubenswrapper[4787]: I0127 07:52:58.811411 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-fqb6r" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0ff88955-7cd9-4af4-9e0e-79614a1d2994\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://47c65fabdecb2c24a7eeabdff7a89339f711ff64e9cb6516900be57a2753f89b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5e7833b50fe96981aa0ed63aa03b7078def1c809e60092a901a8b7ebaee78ad8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5e7833b50fe96981aa0ed63aa03b7078def1c809e60092a901a8b7ebaee78ad8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:52:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a0b5a15ba402c52fc6f3cd5d882f597c90ea8d1ab4e836f0e0998a301126f5a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a0b5a15ba402c52fc6f3cd5d882f597c90ea8d1ab4e836f0e0998a301126f5a6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:52:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:52:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://82bb64425ded7611030cccfa7a439b14e545bf83bc48c762df144eddc7d08487\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://82bb64425ded7611030cccfa7a439b14e545bf83bc48c762df144eddc7d08487\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:52:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:52:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9782b2a339d2dba1498f28ed3f7e6c4c2e747a282465311592f0fdb5863f823\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f9782b2a339d2dba1498f28ed3f7e6c4c2e747a282465311592f0fdb5863f823\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:52:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:52:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c8924a3ef645447151d91c506d105c9760f0dce0c568e9bf37ea76108a6352b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c8924a3ef645447151d91c506d105c9760f0dce0c568e9bf37ea76108a6352b2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:52:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f0da2ec606517cc9e0c2ed1d549c947db2eecb603f9bc9ba8bddbee0aad4710\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0f0da2ec606517cc9e0c2ed1d549c947db2eecb603f9bc9ba8bddbee0aad4710\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:52:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:52:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-fqb6r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:58Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:58 crc kubenswrapper[4787]: I0127 07:52:58.844908 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7279787f-4f29-4eae-a213-b7ea5d579b93\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://486c1b541ff6adc6a3cdab348fbdd7fbcf1c202c1a474e5e5db37bbcfb38fe06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://47800c35810b19642ee37e41f20900bef93d843d1d7e2219547dde7bf88cc7dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://85684c09dd546fc9a972aa45da092b22d794a5588140a451becfbe962be82b76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0aa5c186d3ea5f41540e7450b9ee5a5bbfe8359
762ab9aa219a4e4bb26fb0a94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9717409dfdeb0d26d9a1c28a651d1feddfd4aaa11a9bc7db7206fbf8c6400c22\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b0fb183cca1d2f6f6f5bd9c80107e85fe0f5f39ea985a036ba115836e45d7cc2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b0fb183cca1d2f6f6f5bd9c80107e85fe0f5f39ea985a036ba115836e45d7cc2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:51:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a9bb746c11b2a67e014815e16d6765a1c86a3d2c156cae83853881bdb988507c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a9bb746c11b2a67e014815e16d6765a1c86a3d2c156cae83853881bdb988507c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://1fa78137ccc23e32f6eb838abf3991ca3d99b8349721f1419c892a7f8795f55f\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1fa78137ccc23e32f6eb838abf3991ca3d99b8349721f1419c892a7f8795f55f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:51:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:35Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:58Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:58 crc kubenswrapper[4787]: I0127 07:52:58.866457 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7a18f3e6327858658093c392b8b7de18ec6a1085c08ae31ae2f2dc371555011a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:58Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:58 crc kubenswrapper[4787]: I0127 07:52:58.886144 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:58 crc kubenswrapper[4787]: I0127 07:52:58.886195 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:58 crc kubenswrapper[4787]: I0127 07:52:58.886209 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:58 crc kubenswrapper[4787]: I0127 07:52:58.886235 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:58 crc kubenswrapper[4787]: I0127 07:52:58.886250 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:58Z","lastTransitionTime":"2026-01-27T07:52:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:58 crc kubenswrapper[4787]: I0127 07:52:58.888075 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:58Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:58 crc kubenswrapper[4787]: I0127 07:52:58.905390 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:58Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:58 crc kubenswrapper[4787]: I0127 07:52:58.939460 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7642m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fa44405c-042c-485a-ab6c-912dcd377751\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b5f54aa358ddf7d9fe76fd45c4e3332ba2ef1fa4da77c6f38ae29209c4339a80\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2abb29a77d0081b70495701e68f0a5a9b80656eab59ae8de10fab1450a1df49f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://108cdc389c2cbb9baeb538856e70ea1ca2bce5968eb4097700b3e71e5322258d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0bcc1a1d9f6e5bd0d8cf9b36441460debd839f92291531175eb05db5070136e8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fbc588f712cb0167029fc80f924f52841fb904738bbcf774c53ca15a2642ef9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc1a27dd4120a4eafd6bfbd908f9fdbc80e9b006afafb509399f2b657b129b2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6d3ec81938807879c988aabe13c7bf18add843b4
295e6a4228405f02b47fd637\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6d3ec81938807879c988aabe13c7bf18add843b4295e6a4228405f02b47fd637\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-27T07:52:57Z\\\",\\\"message\\\":\\\"57 loadbalancer.go:304] Deleted 0 stale LBs for map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-marketplace/community-operators\\\\\\\"}\\\\nI0127 07:52:57.037280 6957 lb_config.go:1031] Cluster endpoints for openshift-image-registry/image-registry for network=default are: map[]\\\\nI0127 07:52:57.037300 6957 loadbalancer.go:304] Deleted 0 stale LBs for map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-machine-api/cluster-autoscaler-operator\\\\\\\"}\\\\nI0127 07:52:57.037235 6957 loadbalancer.go:304] Deleted 0 stale LBs for map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-oauth-apiserver/api\\\\\\\"}\\\\nI0127 07:52:57.037326 6957 services_controller.go:360] Finished syncing service cluster-autoscaler-operator on namespace openshift-machine-api for network=default : 4.740846ms\\\\nI0127 07:52:57.037332 6957 services_controller.go:360] Finished syncing service api on namespace openshift-oauth-apiserver for network=default : 1.998381ms\\\\nI0127 07:52:57.037373 6957 services_controller.go:356] Processing sync for service openshift-ingress-canary/ingress-canary for network=default\\\\nF0127 07:52:57.037046 6957 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: failed to add event handler: handler {0x1e60340 0x1e6002\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-27T07:52:56Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller 
pod=ovnkube-node-7642m_openshift-ovn-kubernetes(fa44405c-042c-485a-ab6c-912dcd377751)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d46b5c512f7b14d97a53ce0355a7ceb08370d3e91ccef003b40386a9785df413\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://199ad62dee1b7ba9f8af9b2f3fcd77db94e8ba7dbfe2d011610c47358c17126b\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://199ad62dee1b7ba9f8af9b2f3fcd77db94e8ba7dbfe2d011610c47358c17126b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:52:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:59Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-7642m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:58Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:58 crc kubenswrapper[4787]: I0127 07:52:58.947769 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 27 07:52:58 crc kubenswrapper[4787]: I0127 07:52:58.947926 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 27 07:52:58 crc kubenswrapper[4787]: I0127 07:52:58.947962 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 27 07:52:58 crc kubenswrapper[4787]: E0127 07:52:58.948036 4787 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Jan 27 07:52:58 crc kubenswrapper[4787]: E0127 07:52:58.948062 4787 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-27 07:54:02.948014157 +0000 UTC m=+148.600369649 (durationBeforeRetry 1m4s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 27 07:52:58 crc kubenswrapper[4787]: E0127 07:52:58.948110 4787 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-27 07:54:02.948099399 +0000 UTC m=+148.600454891 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Jan 27 07:52:58 crc kubenswrapper[4787]: I0127 07:52:58.948292 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 27 07:52:58 crc kubenswrapper[4787]: E0127 07:52:58.948325 4787 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 27 07:52:58 crc kubenswrapper[4787]: E0127 07:52:58.948341 4787 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 27 07:52:58 crc kubenswrapper[4787]: I0127 07:52:58.948346 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 27 07:52:58 crc kubenswrapper[4787]: E0127 07:52:58.948354 4787 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 27 07:52:58 crc kubenswrapper[4787]: E0127 07:52:58.948445 4787 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 27 07:52:58 crc kubenswrapper[4787]: E0127 07:52:58.948398 4787 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 27 07:52:58 crc kubenswrapper[4787]: E0127 07:52:58.948533 4787 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object 
"openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 27 07:52:58 crc kubenswrapper[4787]: E0127 07:52:58.948578 4787 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 27 07:52:58 crc kubenswrapper[4787]: E0127 07:52:58.948491 4787 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-27 07:54:02.948471267 +0000 UTC m=+148.600826759 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 27 07:52:58 crc kubenswrapper[4787]: E0127 07:52:58.948628 4787 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-01-27 07:54:02.94861979 +0000 UTC m=+148.600975282 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 27 07:52:58 crc kubenswrapper[4787]: E0127 07:52:58.948641 4787 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-01-27 07:54:02.94863446 +0000 UTC m=+148.600989952 (durationBeforeRetry 1m4s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 27 07:52:58 crc kubenswrapper[4787]: I0127 07:52:58.957258 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-vhss5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"643aaef9-e302-436b-943e-940480ef74fc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5cda08199034af06aa5966ebd72a8a553fe0c83acf73bbd42f4f7d6bb121b2e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8q989\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ea42ae75a943bc89609995381bc82366333131d7c7200a3ab758b9de239e3283\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8q989\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"
hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:52:12Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-vhss5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:58Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:58 crc kubenswrapper[4787]: I0127 07:52:58.973130 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-vws75" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3969f21f-ab36-49b4-9a9c-02cf19e65ad0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:13Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:13Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:13Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq2mg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq2mg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:52:13Z\\\"}}\" for pod 
\"openshift-multus\"/\"network-metrics-daemon-vws75\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:52:58Z is after 2025-08-24T17:21:41Z" Jan 27 07:52:58 crc kubenswrapper[4787]: I0127 07:52:58.990384 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:58 crc kubenswrapper[4787]: I0127 07:52:58.990453 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:58 crc kubenswrapper[4787]: I0127 07:52:58.990471 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:58 crc kubenswrapper[4787]: I0127 07:52:58.990501 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:58 crc kubenswrapper[4787]: I0127 07:52:58.990524 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:58Z","lastTransitionTime":"2026-01-27T07:52:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:59 crc kubenswrapper[4787]: I0127 07:52:59.076134 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-vws75" Jan 27 07:52:59 crc kubenswrapper[4787]: I0127 07:52:59.076211 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 27 07:52:59 crc kubenswrapper[4787]: E0127 07:52:59.076282 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-vws75" podUID="3969f21f-ab36-49b4-9a9c-02cf19e65ad0" Jan 27 07:52:59 crc kubenswrapper[4787]: I0127 07:52:59.076227 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 27 07:52:59 crc kubenswrapper[4787]: I0127 07:52:59.076227 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 27 07:52:59 crc kubenswrapper[4787]: E0127 07:52:59.076420 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 27 07:52:59 crc kubenswrapper[4787]: E0127 07:52:59.076638 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 27 07:52:59 crc kubenswrapper[4787]: E0127 07:52:59.076784 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 27 07:52:59 crc kubenswrapper[4787]: I0127 07:52:59.082856 4787 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-13 20:30:41.728034698 +0000 UTC Jan 27 07:52:59 crc kubenswrapper[4787]: I0127 07:52:59.093724 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:59 crc kubenswrapper[4787]: I0127 07:52:59.093764 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:59 crc kubenswrapper[4787]: I0127 07:52:59.093777 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:59 crc kubenswrapper[4787]: I0127 07:52:59.093796 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:59 crc kubenswrapper[4787]: I0127 07:52:59.093812 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:59Z","lastTransitionTime":"2026-01-27T07:52:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:52:59 crc kubenswrapper[4787]: I0127 07:52:59.197927 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:59 crc kubenswrapper[4787]: I0127 07:52:59.197991 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:59 crc kubenswrapper[4787]: I0127 07:52:59.198009 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:59 crc kubenswrapper[4787]: I0127 07:52:59.198034 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:59 crc kubenswrapper[4787]: I0127 07:52:59.198052 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:59Z","lastTransitionTime":"2026-01-27T07:52:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:59 crc kubenswrapper[4787]: I0127 07:52:59.301056 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:59 crc kubenswrapper[4787]: I0127 07:52:59.301112 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:59 crc kubenswrapper[4787]: I0127 07:52:59.301128 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:59 crc kubenswrapper[4787]: I0127 07:52:59.301153 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:59 crc kubenswrapper[4787]: I0127 07:52:59.301171 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:59Z","lastTransitionTime":"2026-01-27T07:52:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:59 crc kubenswrapper[4787]: I0127 07:52:59.404203 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:59 crc kubenswrapper[4787]: I0127 07:52:59.404281 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:59 crc kubenswrapper[4787]: I0127 07:52:59.404309 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:59 crc kubenswrapper[4787]: I0127 07:52:59.404339 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:59 crc kubenswrapper[4787]: I0127 07:52:59.404362 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:59Z","lastTransitionTime":"2026-01-27T07:52:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:52:59 crc kubenswrapper[4787]: I0127 07:52:59.507721 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:59 crc kubenswrapper[4787]: I0127 07:52:59.507798 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:59 crc kubenswrapper[4787]: I0127 07:52:59.507877 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:59 crc kubenswrapper[4787]: I0127 07:52:59.507981 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:59 crc kubenswrapper[4787]: I0127 07:52:59.508006 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:59Z","lastTransitionTime":"2026-01-27T07:52:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:59 crc kubenswrapper[4787]: I0127 07:52:59.611631 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:59 crc kubenswrapper[4787]: I0127 07:52:59.611700 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:59 crc kubenswrapper[4787]: I0127 07:52:59.611720 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:59 crc kubenswrapper[4787]: I0127 07:52:59.611750 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:59 crc kubenswrapper[4787]: I0127 07:52:59.611769 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:59Z","lastTransitionTime":"2026-01-27T07:52:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:59 crc kubenswrapper[4787]: I0127 07:52:59.715395 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:59 crc kubenswrapper[4787]: I0127 07:52:59.715479 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:59 crc kubenswrapper[4787]: I0127 07:52:59.715504 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:59 crc kubenswrapper[4787]: I0127 07:52:59.715538 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:59 crc kubenswrapper[4787]: I0127 07:52:59.715615 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:59Z","lastTransitionTime":"2026-01-27T07:52:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:52:59 crc kubenswrapper[4787]: I0127 07:52:59.818908 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:59 crc kubenswrapper[4787]: I0127 07:52:59.818948 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:59 crc kubenswrapper[4787]: I0127 07:52:59.818959 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:59 crc kubenswrapper[4787]: I0127 07:52:59.818978 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:59 crc kubenswrapper[4787]: I0127 07:52:59.818991 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:59Z","lastTransitionTime":"2026-01-27T07:52:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:52:59 crc kubenswrapper[4787]: I0127 07:52:59.922779 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:52:59 crc kubenswrapper[4787]: I0127 07:52:59.922857 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:52:59 crc kubenswrapper[4787]: I0127 07:52:59.922874 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:52:59 crc kubenswrapper[4787]: I0127 07:52:59.922897 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:52:59 crc kubenswrapper[4787]: I0127 07:52:59.922913 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:52:59Z","lastTransitionTime":"2026-01-27T07:52:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:53:00 crc kubenswrapper[4787]: I0127 07:53:00.026859 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:53:00 crc kubenswrapper[4787]: I0127 07:53:00.026918 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:53:00 crc kubenswrapper[4787]: I0127 07:53:00.026937 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:53:00 crc kubenswrapper[4787]: I0127 07:53:00.026965 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:53:00 crc kubenswrapper[4787]: I0127 07:53:00.026989 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:53:00Z","lastTransitionTime":"2026-01-27T07:53:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:53:00 crc kubenswrapper[4787]: I0127 07:53:00.083250 4787 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-16 14:00:59.200373332 +0000 UTC Jan 27 07:53:00 crc kubenswrapper[4787]: I0127 07:53:00.092829 4787 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/kube-rbac-proxy-crio-crc"] Jan 27 07:53:00 crc kubenswrapper[4787]: I0127 07:53:00.131476 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:53:00 crc kubenswrapper[4787]: I0127 07:53:00.131539 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:53:00 crc kubenswrapper[4787]: I0127 07:53:00.131573 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:53:00 crc kubenswrapper[4787]: I0127 07:53:00.131601 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:53:00 crc kubenswrapper[4787]: I0127 07:53:00.131620 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:53:00Z","lastTransitionTime":"2026-01-27T07:53:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:53:00 crc kubenswrapper[4787]: I0127 07:53:00.234984 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:53:00 crc kubenswrapper[4787]: I0127 07:53:00.235046 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:53:00 crc kubenswrapper[4787]: I0127 07:53:00.235059 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:53:00 crc kubenswrapper[4787]: I0127 07:53:00.235078 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:53:00 crc kubenswrapper[4787]: I0127 07:53:00.235089 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:53:00Z","lastTransitionTime":"2026-01-27T07:53:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:53:00 crc kubenswrapper[4787]: I0127 07:53:00.338509 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:53:00 crc kubenswrapper[4787]: I0127 07:53:00.338578 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:53:00 crc kubenswrapper[4787]: I0127 07:53:00.338590 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:53:00 crc kubenswrapper[4787]: I0127 07:53:00.338610 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:53:00 crc kubenswrapper[4787]: I0127 07:53:00.338624 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:53:00Z","lastTransitionTime":"2026-01-27T07:53:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:53:00 crc kubenswrapper[4787]: I0127 07:53:00.442048 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:53:00 crc kubenswrapper[4787]: I0127 07:53:00.442131 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:53:00 crc kubenswrapper[4787]: I0127 07:53:00.442152 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:53:00 crc kubenswrapper[4787]: I0127 07:53:00.442179 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:53:00 crc kubenswrapper[4787]: I0127 07:53:00.442199 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:53:00Z","lastTransitionTime":"2026-01-27T07:53:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:53:00 crc kubenswrapper[4787]: I0127 07:53:00.545547 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:53:00 crc kubenswrapper[4787]: I0127 07:53:00.545664 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:53:00 crc kubenswrapper[4787]: I0127 07:53:00.545691 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:53:00 crc kubenswrapper[4787]: I0127 07:53:00.545719 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:53:00 crc kubenswrapper[4787]: I0127 07:53:00.545744 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:53:00Z","lastTransitionTime":"2026-01-27T07:53:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:53:00 crc kubenswrapper[4787]: I0127 07:53:00.649601 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:53:00 crc kubenswrapper[4787]: I0127 07:53:00.649678 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:53:00 crc kubenswrapper[4787]: I0127 07:53:00.649700 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:53:00 crc kubenswrapper[4787]: I0127 07:53:00.649726 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:53:00 crc kubenswrapper[4787]: I0127 07:53:00.649744 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:53:00Z","lastTransitionTime":"2026-01-27T07:53:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:53:00 crc kubenswrapper[4787]: I0127 07:53:00.752813 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:53:00 crc kubenswrapper[4787]: I0127 07:53:00.752864 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:53:00 crc kubenswrapper[4787]: I0127 07:53:00.752877 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:53:00 crc kubenswrapper[4787]: I0127 07:53:00.752895 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:53:00 crc kubenswrapper[4787]: I0127 07:53:00.752907 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:53:00Z","lastTransitionTime":"2026-01-27T07:53:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:53:00 crc kubenswrapper[4787]: I0127 07:53:00.857035 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:53:00 crc kubenswrapper[4787]: I0127 07:53:00.857118 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:53:00 crc kubenswrapper[4787]: I0127 07:53:00.857139 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:53:00 crc kubenswrapper[4787]: I0127 07:53:00.857170 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:53:00 crc kubenswrapper[4787]: I0127 07:53:00.857192 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:53:00Z","lastTransitionTime":"2026-01-27T07:53:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:53:00 crc kubenswrapper[4787]: I0127 07:53:00.961366 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:53:00 crc kubenswrapper[4787]: I0127 07:53:00.961432 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:53:00 crc kubenswrapper[4787]: I0127 07:53:00.961450 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:53:00 crc kubenswrapper[4787]: I0127 07:53:00.961481 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:53:00 crc kubenswrapper[4787]: I0127 07:53:00.961506 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:53:00Z","lastTransitionTime":"2026-01-27T07:53:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:53:01 crc kubenswrapper[4787]: I0127 07:53:01.065298 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:53:01 crc kubenswrapper[4787]: I0127 07:53:01.065373 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:53:01 crc kubenswrapper[4787]: I0127 07:53:01.065390 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:53:01 crc kubenswrapper[4787]: I0127 07:53:01.065417 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:53:01 crc kubenswrapper[4787]: I0127 07:53:01.065436 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:53:01Z","lastTransitionTime":"2026-01-27T07:53:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:53:01 crc kubenswrapper[4787]: I0127 07:53:01.075782 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 27 07:53:01 crc kubenswrapper[4787]: I0127 07:53:01.075931 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-vws75" Jan 27 07:53:01 crc kubenswrapper[4787]: E0127 07:53:01.076009 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 27 07:53:01 crc kubenswrapper[4787]: I0127 07:53:01.076071 4787 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 27 07:53:01 crc kubenswrapper[4787]: I0127 07:53:01.076079 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 27 07:53:01 crc kubenswrapper[4787]: E0127 07:53:01.076295 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-vws75" podUID="3969f21f-ab36-49b4-9a9c-02cf19e65ad0" Jan 27 07:53:01 crc kubenswrapper[4787]: E0127 07:53:01.076411 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 27 07:53:01 crc kubenswrapper[4787]: E0127 07:53:01.076625 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 27 07:53:01 crc kubenswrapper[4787]: I0127 07:53:01.084339 4787 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-15 06:48:11.899995964 +0000 UTC Jan 27 07:53:01 crc kubenswrapper[4787]: I0127 07:53:01.168894 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:53:01 crc kubenswrapper[4787]: I0127 07:53:01.168955 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:53:01 crc kubenswrapper[4787]: I0127 07:53:01.168975 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:53:01 crc kubenswrapper[4787]: I0127 07:53:01.169000 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:53:01 crc kubenswrapper[4787]: I0127 07:53:01.169018 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:53:01Z","lastTransitionTime":"2026-01-27T07:53:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:53:01 crc kubenswrapper[4787]: I0127 07:53:01.272818 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:53:01 crc kubenswrapper[4787]: I0127 07:53:01.272877 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:53:01 crc kubenswrapper[4787]: I0127 07:53:01.272889 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:53:01 crc kubenswrapper[4787]: I0127 07:53:01.272910 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:53:01 crc kubenswrapper[4787]: I0127 07:53:01.272923 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:53:01Z","lastTransitionTime":"2026-01-27T07:53:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:53:01 crc kubenswrapper[4787]: I0127 07:53:01.376329 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:53:01 crc kubenswrapper[4787]: I0127 07:53:01.376739 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:53:01 crc kubenswrapper[4787]: I0127 07:53:01.376949 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:53:01 crc kubenswrapper[4787]: I0127 07:53:01.377110 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:53:01 crc kubenswrapper[4787]: I0127 07:53:01.377285 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:53:01Z","lastTransitionTime":"2026-01-27T07:53:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:53:01 crc kubenswrapper[4787]: I0127 07:53:01.480373 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:53:01 crc kubenswrapper[4787]: I0127 07:53:01.480442 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:53:01 crc kubenswrapper[4787]: I0127 07:53:01.480465 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:53:01 crc kubenswrapper[4787]: I0127 07:53:01.480497 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:53:01 crc kubenswrapper[4787]: I0127 07:53:01.480517 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:53:01Z","lastTransitionTime":"2026-01-27T07:53:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:53:01 crc kubenswrapper[4787]: I0127 07:53:01.583805 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:53:01 crc kubenswrapper[4787]: I0127 07:53:01.583885 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:53:01 crc kubenswrapper[4787]: I0127 07:53:01.583957 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:53:01 crc kubenswrapper[4787]: I0127 07:53:01.583993 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:53:01 crc kubenswrapper[4787]: I0127 07:53:01.584011 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:53:01Z","lastTransitionTime":"2026-01-27T07:53:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:53:01 crc kubenswrapper[4787]: I0127 07:53:01.688078 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:53:01 crc kubenswrapper[4787]: I0127 07:53:01.688163 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:53:01 crc kubenswrapper[4787]: I0127 07:53:01.688182 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:53:01 crc kubenswrapper[4787]: I0127 07:53:01.688212 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:53:01 crc kubenswrapper[4787]: I0127 07:53:01.688231 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:53:01Z","lastTransitionTime":"2026-01-27T07:53:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:53:01 crc kubenswrapper[4787]: I0127 07:53:01.791118 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:53:01 crc kubenswrapper[4787]: I0127 07:53:01.791517 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:53:01 crc kubenswrapper[4787]: I0127 07:53:01.791615 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:53:01 crc kubenswrapper[4787]: I0127 07:53:01.791690 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:53:01 crc kubenswrapper[4787]: I0127 07:53:01.791765 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:53:01Z","lastTransitionTime":"2026-01-27T07:53:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:53:01 crc kubenswrapper[4787]: I0127 07:53:01.895394 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:53:01 crc kubenswrapper[4787]: I0127 07:53:01.895477 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:53:01 crc kubenswrapper[4787]: I0127 07:53:01.895497 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:53:01 crc kubenswrapper[4787]: I0127 07:53:01.895531 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:53:01 crc kubenswrapper[4787]: I0127 07:53:01.895600 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:53:01Z","lastTransitionTime":"2026-01-27T07:53:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:53:01 crc kubenswrapper[4787]: I0127 07:53:01.998036 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:53:01 crc kubenswrapper[4787]: I0127 07:53:01.998083 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:53:01 crc kubenswrapper[4787]: I0127 07:53:01.998095 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:53:01 crc kubenswrapper[4787]: I0127 07:53:01.998114 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:53:01 crc kubenswrapper[4787]: I0127 07:53:01.998127 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:53:01Z","lastTransitionTime":"2026-01-27T07:53:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:53:02 crc kubenswrapper[4787]: I0127 07:53:02.084524 4787 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-13 18:13:12.359759477 +0000 UTC Jan 27 07:53:02 crc kubenswrapper[4787]: I0127 07:53:02.101083 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:53:02 crc kubenswrapper[4787]: I0127 07:53:02.101128 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:53:02 crc kubenswrapper[4787]: I0127 07:53:02.101140 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:53:02 crc kubenswrapper[4787]: I0127 07:53:02.101157 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:53:02 crc kubenswrapper[4787]: I0127 07:53:02.101167 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:53:02Z","lastTransitionTime":"2026-01-27T07:53:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:53:02 crc kubenswrapper[4787]: I0127 07:53:02.203201 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:53:02 crc kubenswrapper[4787]: I0127 07:53:02.203251 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:53:02 crc kubenswrapper[4787]: I0127 07:53:02.203267 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:53:02 crc kubenswrapper[4787]: I0127 07:53:02.203286 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:53:02 crc kubenswrapper[4787]: I0127 07:53:02.203303 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:53:02Z","lastTransitionTime":"2026-01-27T07:53:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:53:02 crc kubenswrapper[4787]: I0127 07:53:02.307448 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:53:02 crc kubenswrapper[4787]: I0127 07:53:02.307505 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:53:02 crc kubenswrapper[4787]: I0127 07:53:02.307519 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:53:02 crc kubenswrapper[4787]: I0127 07:53:02.307540 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:53:02 crc kubenswrapper[4787]: I0127 07:53:02.307599 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:53:02Z","lastTransitionTime":"2026-01-27T07:53:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:53:02 crc kubenswrapper[4787]: I0127 07:53:02.409956 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:53:02 crc kubenswrapper[4787]: I0127 07:53:02.410001 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:53:02 crc kubenswrapper[4787]: I0127 07:53:02.410011 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:53:02 crc kubenswrapper[4787]: I0127 07:53:02.410029 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:53:02 crc kubenswrapper[4787]: I0127 07:53:02.410041 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:53:02Z","lastTransitionTime":"2026-01-27T07:53:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:53:02 crc kubenswrapper[4787]: I0127 07:53:02.513415 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:53:02 crc kubenswrapper[4787]: I0127 07:53:02.513481 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:53:02 crc kubenswrapper[4787]: I0127 07:53:02.513504 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:53:02 crc kubenswrapper[4787]: I0127 07:53:02.513525 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:53:02 crc kubenswrapper[4787]: I0127 07:53:02.513537 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:53:02Z","lastTransitionTime":"2026-01-27T07:53:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:53:02 crc kubenswrapper[4787]: I0127 07:53:02.617432 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:53:02 crc kubenswrapper[4787]: I0127 07:53:02.617524 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:53:02 crc kubenswrapper[4787]: I0127 07:53:02.617585 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:53:02 crc kubenswrapper[4787]: I0127 07:53:02.617619 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:53:02 crc kubenswrapper[4787]: I0127 07:53:02.617638 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:53:02Z","lastTransitionTime":"2026-01-27T07:53:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:53:02 crc kubenswrapper[4787]: I0127 07:53:02.721469 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:53:02 crc kubenswrapper[4787]: I0127 07:53:02.721584 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:53:02 crc kubenswrapper[4787]: I0127 07:53:02.721614 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:53:02 crc kubenswrapper[4787]: I0127 07:53:02.721649 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:53:02 crc kubenswrapper[4787]: I0127 07:53:02.721676 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:53:02Z","lastTransitionTime":"2026-01-27T07:53:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:53:02 crc kubenswrapper[4787]: I0127 07:53:02.781509 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:53:02 crc kubenswrapper[4787]: I0127 07:53:02.781625 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:53:02 crc kubenswrapper[4787]: I0127 07:53:02.781649 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:53:02 crc kubenswrapper[4787]: I0127 07:53:02.781681 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:53:02 crc kubenswrapper[4787]: I0127 07:53:02.781704 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:53:02Z","lastTransitionTime":"2026-01-27T07:53:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:53:02 crc kubenswrapper[4787]: E0127 07:53:02.800641 4787 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T07:53:02Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T07:53:02Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T07:53:02Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T07:53:02Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T07:53:02Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T07:53:02Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T07:53:02Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T07:53:02Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"55ded10c-ad4c-443d-a571-c7a8f4a50b03\\\",\\\"systemUUID\\\":\\\"553a2493-323d-43ed-bfcf-44c5a8746c19\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:53:02Z is after 2025-08-24T17:21:41Z" Jan 27 07:53:02 crc kubenswrapper[4787]: I0127 07:53:02.805840 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:53:02 crc kubenswrapper[4787]: I0127 07:53:02.805918 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
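Every status-update retry in this stretch fails at the same point: the API server reports that it cannot call the node.network-node-identity.openshift.io webhook at https://127.0.0.1:9743 because the certificate presented there expired on 2025-08-24T17:21:41Z, while the node clock reads 2026-01-27. The x509 failure quoted above is the standard validity-window check (NotBefore <= now <= NotAfter). Below is a minimal Go diagnostic sketch of that comparison, assuming it is run on the node itself so the loopback endpoint named in the log is reachable; it only inspects the presented certificate and does not reproduce the kubelet's or API server's own code path.

package main

import (
	"crypto/tls"
	"fmt"
	"os"
	"time"
)

func main() {
	// Address taken from the webhook URL in the log; adjust if the endpoint differs.
	addr := "127.0.0.1:9743"

	// InsecureSkipVerify lets us complete the handshake and read the certificate
	// even though normal verification would fail; this is for diagnosis only.
	conn, err := tls.Dial("tcp", addr, &tls.Config{InsecureSkipVerify: true})
	if err != nil {
		fmt.Fprintln(os.Stderr, "dial:", err)
		os.Exit(1)
	}
	defer conn.Close()

	cert := conn.ConnectionState().PeerCertificates[0]
	now := time.Now()
	fmt.Printf("subject: %s\n", cert.Subject)
	fmt.Printf("valid:   %s -> %s\n", cert.NotBefore.Format(time.RFC3339), cert.NotAfter.Format(time.RFC3339))

	switch {
	case now.Before(cert.NotBefore):
		fmt.Println("certificate is not yet valid")
	case now.After(cert.NotAfter):
		// This is the state the log reports for the webhook's certificate.
		fmt.Println("certificate has expired")
	default:
		fmt.Println("certificate is within its validity window")
	}
}

Until that webhook is served with a current certificate (or the node's clock and the cluster's certificate rotation are reconciled), each retry of the node status patch can be expected to fail with the same error.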
event="NodeHasNoDiskPressure" Jan 27 07:53:02 crc kubenswrapper[4787]: I0127 07:53:02.805938 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:53:02 crc kubenswrapper[4787]: I0127 07:53:02.805972 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:53:02 crc kubenswrapper[4787]: I0127 07:53:02.805998 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:53:02Z","lastTransitionTime":"2026-01-27T07:53:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:53:02 crc kubenswrapper[4787]: E0127 07:53:02.825494 4787 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T07:53:02Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T07:53:02Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T07:53:02Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T07:53:02Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T07:53:02Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T07:53:02Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T07:53:02Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T07:53:02Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"55ded10c-ad4c-443d-a571-c7a8f4a50b03\\\",\\\"systemUUID\\\":\\\"553a2493-323d-43ed-bfcf-44c5a8746c19\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:53:02Z is after 2025-08-24T17:21:41Z" Jan 27 07:53:02 crc kubenswrapper[4787]: I0127 07:53:02.831118 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:53:02 crc kubenswrapper[4787]: I0127 07:53:02.831305 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
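Interleaved with the failed patches, the kubelet keeps re-recording the same Ready=False condition: the container runtime reports NetworkReady=false because no CNI configuration file exists in /etc/kubernetes/cni/net.d/ yet. The sketch below illustrates roughly what such a readiness probe amounts to, scanning that directory for network definitions; the accepted extensions (.conf, .conflist, .json) are an assumption about typical CNI layouts, not something taken from this log.

package main

import (
	"fmt"
	"os"
	"path/filepath"
)

func main() {
	// Directory named in the kubelet message as the expected CNI config location.
	dir := "/etc/kubernetes/cni/net.d"

	entries, err := os.ReadDir(dir)
	if err != nil {
		fmt.Println("cannot read", dir, ":", err)
		return
	}

	var found []string
	for _, e := range entries {
		if e.IsDir() {
			continue
		}
		// Assumed set of CNI config extensions for illustration.
		switch filepath.Ext(e.Name()) {
		case ".conf", ".conflist", ".json":
			found = append(found, e.Name())
		}
	}

	if len(found) == 0 {
		// The state this log reports: no CNI configuration has been written yet.
		fmt.Println("no CNI configuration files in", dir)
		return
	}
	fmt.Println("CNI configurations present:", found)
}

If the directory stays empty, the runtime keeps reporting NetworkReady=false and the kubelet keeps republishing the condition seen above on every sync.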
event="NodeHasNoDiskPressure" Jan 27 07:53:02 crc kubenswrapper[4787]: I0127 07:53:02.831393 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:53:02 crc kubenswrapper[4787]: I0127 07:53:02.831510 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:53:02 crc kubenswrapper[4787]: I0127 07:53:02.831627 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:53:02Z","lastTransitionTime":"2026-01-27T07:53:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:53:02 crc kubenswrapper[4787]: E0127 07:53:02.855377 4787 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T07:53:02Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T07:53:02Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T07:53:02Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T07:53:02Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T07:53:02Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T07:53:02Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T07:53:02Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T07:53:02Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"55ded10c-ad4c-443d-a571-c7a8f4a50b03\\\",\\\"systemUUID\\\":\\\"553a2493-323d-43ed-bfcf-44c5a8746c19\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:53:02Z is after 2025-08-24T17:21:41Z" Jan 27 07:53:02 crc kubenswrapper[4787]: I0127 07:53:02.859492 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:53:02 crc kubenswrapper[4787]: I0127 07:53:02.859593 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 27 07:53:02 crc kubenswrapper[4787]: I0127 07:53:02.859612 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:53:02 crc kubenswrapper[4787]: I0127 07:53:02.859640 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:53:02 crc kubenswrapper[4787]: I0127 07:53:02.859657 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:53:02Z","lastTransitionTime":"2026-01-27T07:53:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:53:02 crc kubenswrapper[4787]: E0127 07:53:02.874666 4787 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T07:53:02Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T07:53:02Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T07:53:02Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T07:53:02Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T07:53:02Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T07:53:02Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T07:53:02Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T07:53:02Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"55ded10c-ad4c-443d-a571-c7a8f4a50b03\\\",\\\"systemUUID\\\":\\\"553a2493-323d-43ed-bfcf-44c5a8746c19\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:53:02Z is after 2025-08-24T17:21:41Z" Jan 27 07:53:02 crc kubenswrapper[4787]: I0127 07:53:02.881759 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:53:02 crc kubenswrapper[4787]: I0127 07:53:02.881837 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
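Each retry block also logs, via setters.go:603, the exact Ready condition the kubelet is trying to write. It is a small JSON object whose fields mirror the shape of a core/v1 NodeCondition. The following self-contained Go sketch decodes the payload copied verbatim from these lines; the nodeCondition struct is a local stand-in for illustration, with the timestamps kept as plain strings.

package main

import (
	"encoding/json"
	"fmt"
)

// nodeCondition mirrors the fields of the condition object printed by setters.go:603.
type nodeCondition struct {
	Type               string `json:"type"`
	Status             string `json:"status"`
	LastHeartbeatTime  string `json:"lastHeartbeatTime"`
	LastTransitionTime string `json:"lastTransitionTime"`
	Reason             string `json:"reason"`
	Message            string `json:"message"`
}

func main() {
	// The condition payload repeated throughout this section of the log.
	raw := `{"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:53:02Z","lastTransitionTime":"2026-01-27T07:53:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}`

	var c nodeCondition
	if err := json.Unmarshal([]byte(raw), &c); err != nil {
		panic(err)
	}
	fmt.Printf("%s=%s (%s)\n", c.Type, c.Status, c.Reason)
	fmt.Println(c.Message)
}

The same object, with identical timestamps, reason, and message, appears in every occurrence above; only the surrounding journal timestamps advance.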
event="NodeHasNoDiskPressure" Jan 27 07:53:02 crc kubenswrapper[4787]: I0127 07:53:02.881852 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:53:02 crc kubenswrapper[4787]: I0127 07:53:02.881873 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:53:02 crc kubenswrapper[4787]: I0127 07:53:02.881887 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:53:02Z","lastTransitionTime":"2026-01-27T07:53:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:53:02 crc kubenswrapper[4787]: E0127 07:53:02.896503 4787 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T07:53:02Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T07:53:02Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T07:53:02Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T07:53:02Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T07:53:02Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T07:53:02Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T07:53:02Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T07:53:02Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"55ded10c-ad4c-443d-a571-c7a8f4a50b03\\\",\\\"systemUUID\\\":\\\"553a2493-323d-43ed-bfcf-44c5a8746c19\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:53:02Z is after 2025-08-24T17:21:41Z" Jan 27 07:53:02 crc kubenswrapper[4787]: E0127 07:53:02.896724 4787 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Jan 27 07:53:02 crc kubenswrapper[4787]: I0127 07:53:02.898524 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Jan 27 07:53:02 crc kubenswrapper[4787]: I0127 07:53:02.898571 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:53:02 crc kubenswrapper[4787]: I0127 07:53:02.898592 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:53:02 crc kubenswrapper[4787]: I0127 07:53:02.898617 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:53:02 crc kubenswrapper[4787]: I0127 07:53:02.898627 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:53:02Z","lastTransitionTime":"2026-01-27T07:53:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:53:03 crc kubenswrapper[4787]: I0127 07:53:03.001101 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:53:03 crc kubenswrapper[4787]: I0127 07:53:03.001180 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:53:03 crc kubenswrapper[4787]: I0127 07:53:03.001195 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:53:03 crc kubenswrapper[4787]: I0127 07:53:03.001217 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:53:03 crc kubenswrapper[4787]: I0127 07:53:03.001234 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:53:03Z","lastTransitionTime":"2026-01-27T07:53:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:53:03 crc kubenswrapper[4787]: I0127 07:53:03.075832 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-vws75" Jan 27 07:53:03 crc kubenswrapper[4787]: I0127 07:53:03.075832 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 27 07:53:03 crc kubenswrapper[4787]: I0127 07:53:03.076009 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 27 07:53:03 crc kubenswrapper[4787]: E0127 07:53:03.076170 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-vws75" podUID="3969f21f-ab36-49b4-9a9c-02cf19e65ad0" Jan 27 07:53:03 crc kubenswrapper[4787]: E0127 07:53:03.076489 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 27 07:53:03 crc kubenswrapper[4787]: E0127 07:53:03.076534 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 27 07:53:03 crc kubenswrapper[4787]: I0127 07:53:03.076301 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 27 07:53:03 crc kubenswrapper[4787]: E0127 07:53:03.076727 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 27 07:53:03 crc kubenswrapper[4787]: I0127 07:53:03.085576 4787 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-09 00:06:46.900548752 +0000 UTC Jan 27 07:53:03 crc kubenswrapper[4787]: I0127 07:53:03.104001 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:53:03 crc kubenswrapper[4787]: I0127 07:53:03.104041 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:53:03 crc kubenswrapper[4787]: I0127 07:53:03.104070 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:53:03 crc kubenswrapper[4787]: I0127 07:53:03.104091 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:53:03 crc kubenswrapper[4787]: I0127 07:53:03.104103 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:53:03Z","lastTransitionTime":"2026-01-27T07:53:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:53:03 crc kubenswrapper[4787]: I0127 07:53:03.207151 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:53:03 crc kubenswrapper[4787]: I0127 07:53:03.207230 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:53:03 crc kubenswrapper[4787]: I0127 07:53:03.207248 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:53:03 crc kubenswrapper[4787]: I0127 07:53:03.207278 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:53:03 crc kubenswrapper[4787]: I0127 07:53:03.207298 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:53:03Z","lastTransitionTime":"2026-01-27T07:53:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:53:03 crc kubenswrapper[4787]: I0127 07:53:03.310978 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:53:03 crc kubenswrapper[4787]: I0127 07:53:03.311790 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:53:03 crc kubenswrapper[4787]: I0127 07:53:03.312113 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:53:03 crc kubenswrapper[4787]: I0127 07:53:03.312227 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:53:03 crc kubenswrapper[4787]: I0127 07:53:03.312312 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:53:03Z","lastTransitionTime":"2026-01-27T07:53:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:53:03 crc kubenswrapper[4787]: I0127 07:53:03.415858 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:53:03 crc kubenswrapper[4787]: I0127 07:53:03.415928 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:53:03 crc kubenswrapper[4787]: I0127 07:53:03.415948 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:53:03 crc kubenswrapper[4787]: I0127 07:53:03.415977 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:53:03 crc kubenswrapper[4787]: I0127 07:53:03.416001 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:53:03Z","lastTransitionTime":"2026-01-27T07:53:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:53:03 crc kubenswrapper[4787]: I0127 07:53:03.520540 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:53:03 crc kubenswrapper[4787]: I0127 07:53:03.520824 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:53:03 crc kubenswrapper[4787]: I0127 07:53:03.520906 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:53:03 crc kubenswrapper[4787]: I0127 07:53:03.520982 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:53:03 crc kubenswrapper[4787]: I0127 07:53:03.521046 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:53:03Z","lastTransitionTime":"2026-01-27T07:53:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:53:03 crc kubenswrapper[4787]: I0127 07:53:03.623984 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:53:03 crc kubenswrapper[4787]: I0127 07:53:03.624351 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:53:03 crc kubenswrapper[4787]: I0127 07:53:03.624417 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:53:03 crc kubenswrapper[4787]: I0127 07:53:03.624508 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:53:03 crc kubenswrapper[4787]: I0127 07:53:03.624636 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:53:03Z","lastTransitionTime":"2026-01-27T07:53:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:53:03 crc kubenswrapper[4787]: I0127 07:53:03.727379 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:53:03 crc kubenswrapper[4787]: I0127 07:53:03.727450 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:53:03 crc kubenswrapper[4787]: I0127 07:53:03.727475 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:53:03 crc kubenswrapper[4787]: I0127 07:53:03.727507 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:53:03 crc kubenswrapper[4787]: I0127 07:53:03.727530 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:53:03Z","lastTransitionTime":"2026-01-27T07:53:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:53:03 crc kubenswrapper[4787]: I0127 07:53:03.830738 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:53:03 crc kubenswrapper[4787]: I0127 07:53:03.830870 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:53:03 crc kubenswrapper[4787]: I0127 07:53:03.830899 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:53:03 crc kubenswrapper[4787]: I0127 07:53:03.830924 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:53:03 crc kubenswrapper[4787]: I0127 07:53:03.830944 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:53:03Z","lastTransitionTime":"2026-01-27T07:53:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:53:03 crc kubenswrapper[4787]: I0127 07:53:03.933511 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:53:03 crc kubenswrapper[4787]: I0127 07:53:03.933684 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:53:03 crc kubenswrapper[4787]: I0127 07:53:03.933719 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:53:03 crc kubenswrapper[4787]: I0127 07:53:03.933752 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:53:03 crc kubenswrapper[4787]: I0127 07:53:03.933776 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:53:03Z","lastTransitionTime":"2026-01-27T07:53:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:53:04 crc kubenswrapper[4787]: I0127 07:53:04.042506 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:53:04 crc kubenswrapper[4787]: I0127 07:53:04.042627 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:53:04 crc kubenswrapper[4787]: I0127 07:53:04.042659 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:53:04 crc kubenswrapper[4787]: I0127 07:53:04.042686 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:53:04 crc kubenswrapper[4787]: I0127 07:53:04.042702 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:53:04Z","lastTransitionTime":"2026-01-27T07:53:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:53:04 crc kubenswrapper[4787]: I0127 07:53:04.086491 4787 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-26 18:55:36.020738257 +0000 UTC Jan 27 07:53:04 crc kubenswrapper[4787]: I0127 07:53:04.146633 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:53:04 crc kubenswrapper[4787]: I0127 07:53:04.146719 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:53:04 crc kubenswrapper[4787]: I0127 07:53:04.146748 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:53:04 crc kubenswrapper[4787]: I0127 07:53:04.146783 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:53:04 crc kubenswrapper[4787]: I0127 07:53:04.146807 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:53:04Z","lastTransitionTime":"2026-01-27T07:53:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:53:04 crc kubenswrapper[4787]: I0127 07:53:04.250722 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:53:04 crc kubenswrapper[4787]: I0127 07:53:04.250798 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:53:04 crc kubenswrapper[4787]: I0127 07:53:04.250816 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:53:04 crc kubenswrapper[4787]: I0127 07:53:04.250845 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:53:04 crc kubenswrapper[4787]: I0127 07:53:04.250864 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:53:04Z","lastTransitionTime":"2026-01-27T07:53:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:53:04 crc kubenswrapper[4787]: I0127 07:53:04.354971 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:53:04 crc kubenswrapper[4787]: I0127 07:53:04.355026 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:53:04 crc kubenswrapper[4787]: I0127 07:53:04.355042 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:53:04 crc kubenswrapper[4787]: I0127 07:53:04.355062 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:53:04 crc kubenswrapper[4787]: I0127 07:53:04.355075 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:53:04Z","lastTransitionTime":"2026-01-27T07:53:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:53:04 crc kubenswrapper[4787]: I0127 07:53:04.459271 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:53:04 crc kubenswrapper[4787]: I0127 07:53:04.459374 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:53:04 crc kubenswrapper[4787]: I0127 07:53:04.459402 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:53:04 crc kubenswrapper[4787]: I0127 07:53:04.459434 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:53:04 crc kubenswrapper[4787]: I0127 07:53:04.459460 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:53:04Z","lastTransitionTime":"2026-01-27T07:53:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:53:04 crc kubenswrapper[4787]: I0127 07:53:04.562835 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:53:04 crc kubenswrapper[4787]: I0127 07:53:04.562924 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:53:04 crc kubenswrapper[4787]: I0127 07:53:04.562945 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:53:04 crc kubenswrapper[4787]: I0127 07:53:04.562975 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:53:04 crc kubenswrapper[4787]: I0127 07:53:04.562997 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:53:04Z","lastTransitionTime":"2026-01-27T07:53:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:53:04 crc kubenswrapper[4787]: I0127 07:53:04.666934 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:53:04 crc kubenswrapper[4787]: I0127 07:53:04.666979 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:53:04 crc kubenswrapper[4787]: I0127 07:53:04.666992 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:53:04 crc kubenswrapper[4787]: I0127 07:53:04.667008 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:53:04 crc kubenswrapper[4787]: I0127 07:53:04.667019 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:53:04Z","lastTransitionTime":"2026-01-27T07:53:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:53:04 crc kubenswrapper[4787]: I0127 07:53:04.770366 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:53:04 crc kubenswrapper[4787]: I0127 07:53:04.770445 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:53:04 crc kubenswrapper[4787]: I0127 07:53:04.770469 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:53:04 crc kubenswrapper[4787]: I0127 07:53:04.770498 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:53:04 crc kubenswrapper[4787]: I0127 07:53:04.770519 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:53:04Z","lastTransitionTime":"2026-01-27T07:53:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:53:04 crc kubenswrapper[4787]: I0127 07:53:04.873428 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:53:04 crc kubenswrapper[4787]: I0127 07:53:04.873523 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:53:04 crc kubenswrapper[4787]: I0127 07:53:04.873543 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:53:04 crc kubenswrapper[4787]: I0127 07:53:04.873611 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:53:04 crc kubenswrapper[4787]: I0127 07:53:04.873639 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:53:04Z","lastTransitionTime":"2026-01-27T07:53:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:53:04 crc kubenswrapper[4787]: I0127 07:53:04.977096 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:53:04 crc kubenswrapper[4787]: I0127 07:53:04.977139 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:53:04 crc kubenswrapper[4787]: I0127 07:53:04.977148 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:53:04 crc kubenswrapper[4787]: I0127 07:53:04.977165 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:53:04 crc kubenswrapper[4787]: I0127 07:53:04.977177 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:53:04Z","lastTransitionTime":"2026-01-27T07:53:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:53:05 crc kubenswrapper[4787]: I0127 07:53:05.076400 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 27 07:53:05 crc kubenswrapper[4787]: I0127 07:53:05.076492 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 27 07:53:05 crc kubenswrapper[4787]: I0127 07:53:05.076428 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 27 07:53:05 crc kubenswrapper[4787]: E0127 07:53:05.076750 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 27 07:53:05 crc kubenswrapper[4787]: E0127 07:53:05.077086 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 27 07:53:05 crc kubenswrapper[4787]: E0127 07:53:05.077214 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 27 07:53:05 crc kubenswrapper[4787]: I0127 07:53:05.077242 4787 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-vws75" Jan 27 07:53:05 crc kubenswrapper[4787]: E0127 07:53:05.077434 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-vws75" podUID="3969f21f-ab36-49b4-9a9c-02cf19e65ad0" Jan 27 07:53:05 crc kubenswrapper[4787]: I0127 07:53:05.080343 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:53:05 crc kubenswrapper[4787]: I0127 07:53:05.080485 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:53:05 crc kubenswrapper[4787]: I0127 07:53:05.080629 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:53:05 crc kubenswrapper[4787]: I0127 07:53:05.080723 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:53:05 crc kubenswrapper[4787]: I0127 07:53:05.080829 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:53:05Z","lastTransitionTime":"2026-01-27T07:53:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:53:05 crc kubenswrapper[4787]: I0127 07:53:05.087338 4787 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-01 23:14:26.447094203 +0000 UTC Jan 27 07:53:05 crc kubenswrapper[4787]: I0127 07:53:05.098148 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-q4fh5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f051e184-acac-47cf-9e04-9df648288715\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cf36b99a3389bdbbca8142539870671f6be61df22503df198f6255937e619950\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zvg7g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8829867d627dfed47b988a93a9ac3e381c0bf59794b65f8e1c1e821838477748\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zvg7g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:59Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-q4fh5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:53:05Z is after 2025-08-24T17:21:41Z" Jan 27 07:53:05 crc kubenswrapper[4787]: I0127 07:53:05.113586 4787 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6d28aae4-3359-4d7e-8862-8db4b17a3403\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d820bf9fe1c788b0bb17535226d96cd310fc188148091fa8b4186dd43baa5f75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d407da23fc5babf6cec063fb31861fd8ac0f456a44746a83740c61c1e53a436c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0509b185e88f56d56bc6f8429a620d74855d938f1206d6c9193d8e93fc12ae91\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":
[{\\\"containerID\\\":\\\"cri-o://ec120e647d27b70e3066a00046d12258db3162ab9beef75b4079d546c2533aaf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ec120e647d27b70e3066a00046d12258db3162ab9beef75b4079d546c2533aaf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:51:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:35Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:53:05Z is after 2025-08-24T17:21:41Z" Jan 27 07:53:05 crc kubenswrapper[4787]: I0127 07:53:05.131126 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2974b5846d4b9e95a3eab849f160c5b33393ff6b1bf1275bc6476ec80807d632\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4426d84c4375c7c659c77049bbd6a720bf004fa5e7780b1e8739a2620c3667f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d209948
2919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:53:05Z is after 2025-08-24T17:21:41Z" Jan 27 07:53:05 crc kubenswrapper[4787]: I0127 07:53:05.146180 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1d5a64cdfd5f8aff93294f92aebc9ccf8fa2a4063e347fce2e35604d27651d9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:53:05Z is after 2025-08-24T17:21:41Z" Jan 27 07:53:05 crc kubenswrapper[4787]: I0127 07:53:05.161434 4787 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-dns/node-resolver-fghc7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6cac1bb0-6b6f-442f-9db4-a25d4f194bb1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://825d098d784c9e43aa1c5b9014dd290f1a23ddf0651dfd603c634682a8f05da2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c8zvj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:59Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-fghc7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:53:05Z is after 2025-08-24T17:21:41Z" Jan 27 07:53:05 crc kubenswrapper[4787]: I0127 07:53:05.174615 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-rqwfc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fe88a860-6bb6-40ef-8ed7-c16f06fad8d3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f0dc7c2423b05f4eb857a5a6d8a5006cea055965d20a85e84659b369f9659eeb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-scmbf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:52:02Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-rqwfc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:53:05Z is after 2025-08-24T17:21:41Z" Jan 27 07:53:05 crc kubenswrapper[4787]: I0127 07:53:05.184032 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:53:05 crc kubenswrapper[4787]: I0127 07:53:05.184114 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:53:05 crc kubenswrapper[4787]: I0127 07:53:05.184128 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:53:05 crc kubenswrapper[4787]: I0127 07:53:05.184150 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:53:05 crc kubenswrapper[4787]: I0127 07:53:05.184169 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:53:05Z","lastTransitionTime":"2026-01-27T07:53:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:53:05 crc kubenswrapper[4787]: I0127 07:53:05.192924 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9bef4a57-904d-47b1-baa1-224c5c9f2b1a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://99c0fb5499a4bed619d003da60f83076e413fc5bda47a12d42770f567deeeec8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c114c2274358e777b240765cca81ea54a2e334002317a86595b7bfec95a11fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c470d3edd878f69c48f1457780557dd56300c0fe69342c479f1922dca856bceb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:36
Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bba5d04b33d62090867842211184f53bc23cf78b90a4c6709792639b74d1cf0d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:35Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:53:05Z is after 2025-08-24T17:21:41Z" Jan 27 07:53:05 crc kubenswrapper[4787]: I0127 07:53:05.215063 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"60eef58e-b2eb-43d9-a499-317083a89ca3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://35e90c0899d9ae9ea7cce3f5904955f1f3a80e147efbd766e9ed3b34014f40df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://53dd18ffb0f39abccd8edc121977448f4c491f6be9acee866dde8cbeb7c52ab5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://af5452f3980a8a4614721cb60fffffb3b1f3f5d8d82928249857ed2dd3fb5986\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://215bbc991c576293e40d9ba239e697bfad702383af70483ddf0acea311b4ae28\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://879d855210b326ad3cbb964024c0b48b7373519deae8b974b9f5864416c15ef3\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"message\\\":\\\"le observer\\\\nW0127 07:51:54.984485 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0127 07:51:54.984680 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0127 07:51:54.985504 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2155430209/tls.crt::/tmp/serving-cert-2155430209/tls.key\\\\\\\"\\\\nI0127 07:51:55.207010 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0127 07:51:55.214027 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0127 07:51:55.214056 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0127 07:51:55.214085 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0127 07:51:55.214092 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0127 07:51:55.221404 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0127 07:51:55.221608 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0127 07:51:55.221697 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0127 07:51:55.221768 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0127 07:51:55.221825 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0127 07:51:55.221877 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0127 07:51:55.221926 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0127 07:51:55.221414 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0127 07:51:55.222961 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-27T07:51:49Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f3424e1f77a68fac6a8ccd12e018ce28850817c99e72cef7edbff0941124c46b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c37afd8d2562157729aca3685d7abae2bb6b9e081c2b456706dde875fd762c07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c37afd8d2562157729aca3685d7abae2bb6b9e081c2b456706dde875fd762c07\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:51:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:35Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:53:05Z is after 2025-08-24T17:21:41Z" Jan 27 07:53:05 crc kubenswrapper[4787]: I0127 07:53:05.230761 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:53:05Z is after 2025-08-24T17:21:41Z" Jan 27 07:53:05 crc kubenswrapper[4787]: I0127 07:53:05.250010 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:53:05Z is after 2025-08-24T17:21:41Z" Jan 27 07:53:05 crc kubenswrapper[4787]: I0127 07:53:05.266358 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-rqjpz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e6f78168-0b0d-464d-b1c7-00bb9a69c0d1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4924d9f32dbc90c38c70d18db5a32bdabb76d288c34457331804829e451cf169\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e73805a73ffd2c2028fa682daaf20e295ecd2f2d7e24bb9b897883f18c3c514e\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-27T07:52:46Z\\\",\\\"message\\\":\\\"2026-01-27T07:52:01+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_3d832606-6840-4320-b93f-1be93a343206\\\\n2026-01-27T07:52:01+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_3d832606-6840-4320-b93f-1be93a343206 to /host/opt/cni/bin/\\\\n2026-01-27T07:52:01Z [verbose] multus-daemon started\\\\n2026-01-27T07:52:01Z [verbose] Readiness Indicator file check\\\\n2026-01-27T07:52:46Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-27T07:52:00Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tz7b2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-rqjpz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:53:05Z is after 2025-08-24T17:21:41Z" Jan 27 07:53:05 crc kubenswrapper[4787]: I0127 07:53:05.284209 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-fqb6r" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0ff88955-7cd9-4af4-9e0e-79614a1d2994\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://47c65fabdecb2c24a7eeabdff7a89339f711ff64e9cb6516900be57a2753f89b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5e7833b50fe96981aa0ed63aa03b7078def1c809e60092a901a8b7ebaee78ad8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5e7833b50fe96981aa0ed63aa03b7078def1c809e60092a901a8b7ebaee78ad8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:52:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a0b5a15ba402c52fc6f3cd5d882f597c90ea8d1ab4e836f0e0998a301126f5a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a0b5a15ba402c52fc6f3cd5d882f597c90ea8d1ab4e836f0e0998a301126f5a6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:52:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:52:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://82bb64425ded7611030cccfa7a439b14e545bf83bc48c762df144eddc7d08487\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://82bb64425ded7611030cccfa7a439b14e545bf83bc48c762df144eddc7d08487\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:52:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:52:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9782b2a339d2dba1498f28ed3f7e6c4c2e747a282465311592f0fdb5863f823\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f9782b2a339d2dba1498f28ed3f7e6c4c2e747a282465311592f0fdb5863f823\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:52:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:52:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c8924a3ef645447151d91c506d105c9760f0dce0c568e9bf37ea76108a6352b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c8924a3ef645447151d91c506d105c9760f0dce0c568e9bf37ea76108a6352b2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:52:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f0da2ec606517cc9e0c2ed1d549c947db2eecb603f9bc9ba8bddbee0aad4710\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0f0da2ec606517cc9e0c2ed1d549c947db2eecb603f9bc9ba8bddbee0aad4710\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:52:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:52:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-fqb6r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:53:05Z is after 2025-08-24T17:21:41Z" Jan 27 07:53:05 crc kubenswrapper[4787]: I0127 07:53:05.287301 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:53:05 crc kubenswrapper[4787]: I0127 07:53:05.287346 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:53:05 crc 
kubenswrapper[4787]: I0127 07:53:05.287383 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:53:05 crc kubenswrapper[4787]: I0127 07:53:05.287408 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:53:05 crc kubenswrapper[4787]: I0127 07:53:05.287424 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:53:05Z","lastTransitionTime":"2026-01-27T07:53:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:53:05 crc kubenswrapper[4787]: I0127 07:53:05.310873 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7279787f-4f29-4eae-a213-b7ea5d579b93\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://486c1b541ff6adc6a3cdab348fbdd7fbcf1c202c1a474e5e5db37bbcfb38fe06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://47800c35810b19642ee37e41f20900bef93d843d1d7e2219547dde7bf88cc7dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mou
ntPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://85684c09dd546fc9a972aa45da092b22d794a5588140a451becfbe962be82b76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0aa5c186d3ea5f41540e7450b9ee5a5bbfe8359762ab9aa219a4e4bb26fb0a94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9717409dfdeb0d26d9a1c28a651d1feddfd4aaa11a9bc7db7206fbf8c6400c22\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b0fb183cca1d2f6f6f5bd9c80107e85fe0f5f39ea985a036ba115836e45d7cc2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b0fb183cca1d2f6f6f5bd9c80107e85fe0f5f39ea985a036ba115836e45d7cc2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:51:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\
\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a9bb746c11b2a67e014815e16d6765a1c86a3d2c156cae83853881bdb988507c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a9bb746c11b2a67e014815e16d6765a1c86a3d2c156cae83853881bdb988507c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://1fa78137ccc23e32f6eb838abf3991ca3d99b8349721f1419c892a7f8795f55f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1fa78137ccc23e32f6eb838abf3991ca3d99b8349721f1419c892a7f8795f55f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:51:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:35Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:53:05Z is after 2025-08-24T17:21:41Z" Jan 27 07:53:05 crc kubenswrapper[4787]: I0127 07:53:05.345088 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7a18f3e6327858658093c392b8b7de18ec6a1085c08ae31ae2f2dc371555011a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:53:05Z is after 2025-08-24T17:21:41Z" Jan 27 07:53:05 crc kubenswrapper[4787]: I0127 07:53:05.371214 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:55Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:53:05Z is after 2025-08-24T17:21:41Z" Jan 27 07:53:05 crc kubenswrapper[4787]: I0127 07:53:05.390410 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:53:05 crc kubenswrapper[4787]: I0127 07:53:05.390462 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:53:05 crc kubenswrapper[4787]: I0127 07:53:05.390473 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:53:05 crc kubenswrapper[4787]: I0127 07:53:05.390492 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:53:05 crc kubenswrapper[4787]: I0127 07:53:05.390504 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:53:05Z","lastTransitionTime":"2026-01-27T07:53:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:53:05 crc kubenswrapper[4787]: I0127 07:53:05.393290 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-vws75" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3969f21f-ab36-49b4-9a9c-02cf19e65ad0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:13Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:13Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:13Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq2mg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq2mg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:52:13Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-vws75\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:53:05Z is after 2025-08-24T17:21:41Z" Jan 27 07:53:05 crc kubenswrapper[4787]: I0127 07:53:05.407432 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8d547b44-2e4d-4ca1-b1a2-e811ddaef9d8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cae58e072653f1ba9869c3d6a218332a184032d95378dfba6be1d0c683d6ddf1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6a626256ec689285dfaf54ef25e23d40bef94d10fec21b7eca529c49e527d5a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6a626256ec689285dfaf54ef25e23d40bef94d10fec21b7eca529c49e527d5a5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:51:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:35Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:53:05Z is after 2025-08-24T17:21:41Z" Jan 27 07:53:05 crc kubenswrapper[4787]: I0127 07:53:05.428296 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7642m" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fa44405c-042c-485a-ab6c-912dcd377751\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:51:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b5f54aa358ddf7d9fe76fd45c4e3332ba2ef1fa4da77c6f38ae29209c4339a80\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2abb29a77d0081b70495701e68f0a5a9b80656eab59ae8de10fab1450a1df49f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://108cdc389c2cbb9baeb538856e70ea1ca2bce5968eb4097700b3e71e5322258d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0bcc1a1d9f6e5bd0d8cf9b36441460debd839f92291531175eb05db5070136e8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fbc588f712cb0167029fc80f924f52841fb904738bbcf774c53ca15a2642ef9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc1a27dd4120a4eafd6bfbd908f9fdbc80e9b006afafb509399f2b657b129b2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6d3ec81938807879c988aabe13c7bf18add843b4295e6a4228405f02b47fd637\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6d3ec81938807879c988aabe13c7bf18add843b4295e6a4228405f02b47fd637\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-27T07:52:57Z\\\",\\\"message\\\":\\\"57 loadbalancer.go:304] Deleted 0 stale LBs for map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-marketplace/community-operators\\\\\\\"}\\\\nI0127 07:52:57.037280 6957 lb_config.go:1031] Cluster endpoints for openshift-image-registry/image-registry for network=default are: map[]\\\\nI0127 07:52:57.037300 6957 loadbalancer.go:304] Deleted 0 stale LBs for map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-machine-api/cluster-autoscaler-operator\\\\\\\"}\\\\nI0127 07:52:57.037235 6957 loadbalancer.go:304] Deleted 0 stale LBs for map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-oauth-apiserver/api\\\\\\\"}\\\\nI0127 07:52:57.037326 6957 services_controller.go:360] Finished syncing service cluster-autoscaler-operator on namespace openshift-machine-api for network=default : 4.740846ms\\\\nI0127 07:52:57.037332 6957 services_controller.go:360] Finished syncing service api on namespace openshift-oauth-apiserver for network=default : 1.998381ms\\\\nI0127 07:52:57.037373 6957 services_controller.go:356] Processing sync for service openshift-ingress-canary/ingress-canary for network=default\\\\nF0127 07:52:57.037046 6957 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: failed to add event handler: handler {0x1e60340 0x1e6002\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-27T07:52:56Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller 
pod=ovnkube-node-7642m_openshift-ovn-kubernetes(fa44405c-042c-485a-ab6c-912dcd377751)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d46b5c512f7b14d97a53ce0355a7ceb08370d3e91ccef003b40386a9785df413\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://199ad62dee1b7ba9f8af9b2f3fcd77db94e8ba7dbfe2d011610c47358c17126b\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://199ad62dee1b7ba9f8af9b2f3fcd77db94e8ba7dbfe2d011610c47358c17126b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T07:52:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T07:52:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tj4tn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:51:59Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-7642m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:53:05Z is after 2025-08-24T17:21:41Z" Jan 27 07:53:05 crc kubenswrapper[4787]: I0127 07:53:05.443274 4787 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-vhss5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"643aaef9-e302-436b-943e-940480ef74fc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T07:52:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5cda08199034af06aa5966ebd72a8a553fe0c83acf73bbd42f4f7d6bb121b2e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8q989
\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ea42ae75a943bc89609995381bc82366333131d7c7200a3ab758b9de239e3283\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T07:52:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8q989\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T07:52:12Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-vhss5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:53:05Z is after 2025-08-24T17:21:41Z" Jan 27 07:53:05 crc kubenswrapper[4787]: I0127 07:53:05.494611 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:53:05 crc kubenswrapper[4787]: I0127 07:53:05.494685 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:53:05 crc kubenswrapper[4787]: I0127 07:53:05.494704 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:53:05 crc kubenswrapper[4787]: I0127 07:53:05.494732 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:53:05 crc kubenswrapper[4787]: I0127 07:53:05.494753 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:53:05Z","lastTransitionTime":"2026-01-27T07:53:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:53:05 crc kubenswrapper[4787]: I0127 07:53:05.597824 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:53:05 crc kubenswrapper[4787]: I0127 07:53:05.597875 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:53:05 crc kubenswrapper[4787]: I0127 07:53:05.597893 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:53:05 crc kubenswrapper[4787]: I0127 07:53:05.597912 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:53:05 crc kubenswrapper[4787]: I0127 07:53:05.597925 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:53:05Z","lastTransitionTime":"2026-01-27T07:53:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:53:05 crc kubenswrapper[4787]: I0127 07:53:05.700340 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:53:05 crc kubenswrapper[4787]: I0127 07:53:05.700405 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:53:05 crc kubenswrapper[4787]: I0127 07:53:05.700420 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:53:05 crc kubenswrapper[4787]: I0127 07:53:05.700446 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:53:05 crc kubenswrapper[4787]: I0127 07:53:05.700462 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:53:05Z","lastTransitionTime":"2026-01-27T07:53:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:53:05 crc kubenswrapper[4787]: I0127 07:53:05.804587 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:53:05 crc kubenswrapper[4787]: I0127 07:53:05.804636 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:53:05 crc kubenswrapper[4787]: I0127 07:53:05.804645 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:53:05 crc kubenswrapper[4787]: I0127 07:53:05.804666 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:53:05 crc kubenswrapper[4787]: I0127 07:53:05.804677 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:53:05Z","lastTransitionTime":"2026-01-27T07:53:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:53:05 crc kubenswrapper[4787]: I0127 07:53:05.907506 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:53:05 crc kubenswrapper[4787]: I0127 07:53:05.907640 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:53:05 crc kubenswrapper[4787]: I0127 07:53:05.907660 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:53:05 crc kubenswrapper[4787]: I0127 07:53:05.907685 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:53:05 crc kubenswrapper[4787]: I0127 07:53:05.907705 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:53:05Z","lastTransitionTime":"2026-01-27T07:53:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:53:06 crc kubenswrapper[4787]: I0127 07:53:06.010771 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:53:06 crc kubenswrapper[4787]: I0127 07:53:06.010849 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:53:06 crc kubenswrapper[4787]: I0127 07:53:06.010870 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:53:06 crc kubenswrapper[4787]: I0127 07:53:06.010905 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:53:06 crc kubenswrapper[4787]: I0127 07:53:06.010925 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:53:06Z","lastTransitionTime":"2026-01-27T07:53:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:53:06 crc kubenswrapper[4787]: I0127 07:53:06.087528 4787 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-15 19:26:47.194521815 +0000 UTC Jan 27 07:53:06 crc kubenswrapper[4787]: I0127 07:53:06.114600 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:53:06 crc kubenswrapper[4787]: I0127 07:53:06.114670 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:53:06 crc kubenswrapper[4787]: I0127 07:53:06.114689 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:53:06 crc kubenswrapper[4787]: I0127 07:53:06.114716 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:53:06 crc kubenswrapper[4787]: I0127 07:53:06.114736 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:53:06Z","lastTransitionTime":"2026-01-27T07:53:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:53:06 crc kubenswrapper[4787]: I0127 07:53:06.218993 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:53:06 crc kubenswrapper[4787]: I0127 07:53:06.219049 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:53:06 crc kubenswrapper[4787]: I0127 07:53:06.219062 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:53:06 crc kubenswrapper[4787]: I0127 07:53:06.219081 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:53:06 crc kubenswrapper[4787]: I0127 07:53:06.219097 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:53:06Z","lastTransitionTime":"2026-01-27T07:53:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:53:06 crc kubenswrapper[4787]: I0127 07:53:06.322578 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:53:06 crc kubenswrapper[4787]: I0127 07:53:06.322635 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:53:06 crc kubenswrapper[4787]: I0127 07:53:06.322647 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:53:06 crc kubenswrapper[4787]: I0127 07:53:06.322669 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:53:06 crc kubenswrapper[4787]: I0127 07:53:06.322685 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:53:06Z","lastTransitionTime":"2026-01-27T07:53:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:53:06 crc kubenswrapper[4787]: I0127 07:53:06.424977 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:53:06 crc kubenswrapper[4787]: I0127 07:53:06.425030 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:53:06 crc kubenswrapper[4787]: I0127 07:53:06.425042 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:53:06 crc kubenswrapper[4787]: I0127 07:53:06.425066 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:53:06 crc kubenswrapper[4787]: I0127 07:53:06.425104 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:53:06Z","lastTransitionTime":"2026-01-27T07:53:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:53:06 crc kubenswrapper[4787]: I0127 07:53:06.528438 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:53:06 crc kubenswrapper[4787]: I0127 07:53:06.528489 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:53:06 crc kubenswrapper[4787]: I0127 07:53:06.528501 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:53:06 crc kubenswrapper[4787]: I0127 07:53:06.528521 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:53:06 crc kubenswrapper[4787]: I0127 07:53:06.528538 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:53:06Z","lastTransitionTime":"2026-01-27T07:53:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:53:06 crc kubenswrapper[4787]: I0127 07:53:06.631600 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:53:06 crc kubenswrapper[4787]: I0127 07:53:06.631645 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:53:06 crc kubenswrapper[4787]: I0127 07:53:06.631656 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:53:06 crc kubenswrapper[4787]: I0127 07:53:06.631677 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:53:06 crc kubenswrapper[4787]: I0127 07:53:06.631697 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:53:06Z","lastTransitionTime":"2026-01-27T07:53:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:53:06 crc kubenswrapper[4787]: I0127 07:53:06.734524 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:53:06 crc kubenswrapper[4787]: I0127 07:53:06.734655 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:53:06 crc kubenswrapper[4787]: I0127 07:53:06.734672 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:53:06 crc kubenswrapper[4787]: I0127 07:53:06.734698 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:53:06 crc kubenswrapper[4787]: I0127 07:53:06.734716 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:53:06Z","lastTransitionTime":"2026-01-27T07:53:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:53:06 crc kubenswrapper[4787]: I0127 07:53:06.837025 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:53:06 crc kubenswrapper[4787]: I0127 07:53:06.837094 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:53:06 crc kubenswrapper[4787]: I0127 07:53:06.837109 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:53:06 crc kubenswrapper[4787]: I0127 07:53:06.837130 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:53:06 crc kubenswrapper[4787]: I0127 07:53:06.837141 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:53:06Z","lastTransitionTime":"2026-01-27T07:53:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:53:06 crc kubenswrapper[4787]: I0127 07:53:06.939796 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:53:06 crc kubenswrapper[4787]: I0127 07:53:06.939842 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:53:06 crc kubenswrapper[4787]: I0127 07:53:06.939852 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:53:06 crc kubenswrapper[4787]: I0127 07:53:06.939866 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:53:06 crc kubenswrapper[4787]: I0127 07:53:06.939876 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:53:06Z","lastTransitionTime":"2026-01-27T07:53:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:53:07 crc kubenswrapper[4787]: I0127 07:53:07.043314 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:53:07 crc kubenswrapper[4787]: I0127 07:53:07.043392 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:53:07 crc kubenswrapper[4787]: I0127 07:53:07.043411 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:53:07 crc kubenswrapper[4787]: I0127 07:53:07.043440 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:53:07 crc kubenswrapper[4787]: I0127 07:53:07.043463 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:53:07Z","lastTransitionTime":"2026-01-27T07:53:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:53:07 crc kubenswrapper[4787]: I0127 07:53:07.076060 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 27 07:53:07 crc kubenswrapper[4787]: I0127 07:53:07.076214 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 27 07:53:07 crc kubenswrapper[4787]: E0127 07:53:07.076227 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 27 07:53:07 crc kubenswrapper[4787]: I0127 07:53:07.076536 4787 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-vws75" Jan 27 07:53:07 crc kubenswrapper[4787]: I0127 07:53:07.076861 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 27 07:53:07 crc kubenswrapper[4787]: E0127 07:53:07.076837 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 27 07:53:07 crc kubenswrapper[4787]: E0127 07:53:07.077042 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 27 07:53:07 crc kubenswrapper[4787]: E0127 07:53:07.077268 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-vws75" podUID="3969f21f-ab36-49b4-9a9c-02cf19e65ad0" Jan 27 07:53:07 crc kubenswrapper[4787]: I0127 07:53:07.088450 4787 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-22 05:29:25.31940747 +0000 UTC Jan 27 07:53:07 crc kubenswrapper[4787]: I0127 07:53:07.147362 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:53:07 crc kubenswrapper[4787]: I0127 07:53:07.147409 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:53:07 crc kubenswrapper[4787]: I0127 07:53:07.147418 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:53:07 crc kubenswrapper[4787]: I0127 07:53:07.147437 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:53:07 crc kubenswrapper[4787]: I0127 07:53:07.147450 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:53:07Z","lastTransitionTime":"2026-01-27T07:53:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:53:07 crc kubenswrapper[4787]: I0127 07:53:07.250677 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:53:07 crc kubenswrapper[4787]: I0127 07:53:07.250763 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:53:07 crc kubenswrapper[4787]: I0127 07:53:07.250781 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:53:07 crc kubenswrapper[4787]: I0127 07:53:07.250814 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:53:07 crc kubenswrapper[4787]: I0127 07:53:07.250834 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:53:07Z","lastTransitionTime":"2026-01-27T07:53:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:53:07 crc kubenswrapper[4787]: I0127 07:53:07.354099 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:53:07 crc kubenswrapper[4787]: I0127 07:53:07.354177 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:53:07 crc kubenswrapper[4787]: I0127 07:53:07.354196 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:53:07 crc kubenswrapper[4787]: I0127 07:53:07.354223 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:53:07 crc kubenswrapper[4787]: I0127 07:53:07.354245 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:53:07Z","lastTransitionTime":"2026-01-27T07:53:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:53:07 crc kubenswrapper[4787]: I0127 07:53:07.457386 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:53:07 crc kubenswrapper[4787]: I0127 07:53:07.457457 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:53:07 crc kubenswrapper[4787]: I0127 07:53:07.457472 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:53:07 crc kubenswrapper[4787]: I0127 07:53:07.457497 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:53:07 crc kubenswrapper[4787]: I0127 07:53:07.457512 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:53:07Z","lastTransitionTime":"2026-01-27T07:53:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:53:07 crc kubenswrapper[4787]: I0127 07:53:07.560604 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:53:07 crc kubenswrapper[4787]: I0127 07:53:07.560699 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:53:07 crc kubenswrapper[4787]: I0127 07:53:07.560717 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:53:07 crc kubenswrapper[4787]: I0127 07:53:07.560742 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:53:07 crc kubenswrapper[4787]: I0127 07:53:07.560759 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:53:07Z","lastTransitionTime":"2026-01-27T07:53:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:53:07 crc kubenswrapper[4787]: I0127 07:53:07.663597 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:53:07 crc kubenswrapper[4787]: I0127 07:53:07.663664 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:53:07 crc kubenswrapper[4787]: I0127 07:53:07.663680 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:53:07 crc kubenswrapper[4787]: I0127 07:53:07.663704 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:53:07 crc kubenswrapper[4787]: I0127 07:53:07.663726 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:53:07Z","lastTransitionTime":"2026-01-27T07:53:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:53:07 crc kubenswrapper[4787]: I0127 07:53:07.766972 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:53:07 crc kubenswrapper[4787]: I0127 07:53:07.767032 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:53:07 crc kubenswrapper[4787]: I0127 07:53:07.767054 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:53:07 crc kubenswrapper[4787]: I0127 07:53:07.767076 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:53:07 crc kubenswrapper[4787]: I0127 07:53:07.767091 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:53:07Z","lastTransitionTime":"2026-01-27T07:53:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:53:07 crc kubenswrapper[4787]: I0127 07:53:07.870657 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:53:07 crc kubenswrapper[4787]: I0127 07:53:07.870700 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:53:07 crc kubenswrapper[4787]: I0127 07:53:07.870710 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:53:07 crc kubenswrapper[4787]: I0127 07:53:07.870726 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:53:07 crc kubenswrapper[4787]: I0127 07:53:07.870736 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:53:07Z","lastTransitionTime":"2026-01-27T07:53:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:53:07 crc kubenswrapper[4787]: I0127 07:53:07.973522 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:53:07 crc kubenswrapper[4787]: I0127 07:53:07.973614 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:53:07 crc kubenswrapper[4787]: I0127 07:53:07.973637 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:53:07 crc kubenswrapper[4787]: I0127 07:53:07.973662 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:53:07 crc kubenswrapper[4787]: I0127 07:53:07.973684 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:53:07Z","lastTransitionTime":"2026-01-27T07:53:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:53:08 crc kubenswrapper[4787]: I0127 07:53:08.077002 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:53:08 crc kubenswrapper[4787]: I0127 07:53:08.077094 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:53:08 crc kubenswrapper[4787]: I0127 07:53:08.077113 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:53:08 crc kubenswrapper[4787]: I0127 07:53:08.077139 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:53:08 crc kubenswrapper[4787]: I0127 07:53:08.077160 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:53:08Z","lastTransitionTime":"2026-01-27T07:53:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:53:08 crc kubenswrapper[4787]: I0127 07:53:08.089579 4787 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-30 22:37:11.807882283 +0000 UTC Jan 27 07:53:08 crc kubenswrapper[4787]: I0127 07:53:08.182417 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:53:08 crc kubenswrapper[4787]: I0127 07:53:08.182886 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:53:08 crc kubenswrapper[4787]: I0127 07:53:08.182985 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:53:08 crc kubenswrapper[4787]: I0127 07:53:08.183110 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:53:08 crc kubenswrapper[4787]: I0127 07:53:08.183238 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:53:08Z","lastTransitionTime":"2026-01-27T07:53:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:53:08 crc kubenswrapper[4787]: I0127 07:53:08.286492 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:53:08 crc kubenswrapper[4787]: I0127 07:53:08.286573 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:53:08 crc kubenswrapper[4787]: I0127 07:53:08.286588 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:53:08 crc kubenswrapper[4787]: I0127 07:53:08.286615 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:53:08 crc kubenswrapper[4787]: I0127 07:53:08.286627 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:53:08Z","lastTransitionTime":"2026-01-27T07:53:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:53:08 crc kubenswrapper[4787]: I0127 07:53:08.389877 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:53:08 crc kubenswrapper[4787]: I0127 07:53:08.390264 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:53:08 crc kubenswrapper[4787]: I0127 07:53:08.390458 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:53:08 crc kubenswrapper[4787]: I0127 07:53:08.390674 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:53:08 crc kubenswrapper[4787]: I0127 07:53:08.390810 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:53:08Z","lastTransitionTime":"2026-01-27T07:53:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:53:08 crc kubenswrapper[4787]: I0127 07:53:08.494023 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:53:08 crc kubenswrapper[4787]: I0127 07:53:08.494088 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:53:08 crc kubenswrapper[4787]: I0127 07:53:08.494105 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:53:08 crc kubenswrapper[4787]: I0127 07:53:08.494133 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:53:08 crc kubenswrapper[4787]: I0127 07:53:08.494154 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:53:08Z","lastTransitionTime":"2026-01-27T07:53:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:53:08 crc kubenswrapper[4787]: I0127 07:53:08.598117 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:53:08 crc kubenswrapper[4787]: I0127 07:53:08.598174 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:53:08 crc kubenswrapper[4787]: I0127 07:53:08.598187 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:53:08 crc kubenswrapper[4787]: I0127 07:53:08.598206 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:53:08 crc kubenswrapper[4787]: I0127 07:53:08.598217 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:53:08Z","lastTransitionTime":"2026-01-27T07:53:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:53:08 crc kubenswrapper[4787]: I0127 07:53:08.702063 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:53:08 crc kubenswrapper[4787]: I0127 07:53:08.702133 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:53:08 crc kubenswrapper[4787]: I0127 07:53:08.702147 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:53:08 crc kubenswrapper[4787]: I0127 07:53:08.702170 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:53:08 crc kubenswrapper[4787]: I0127 07:53:08.702188 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:53:08Z","lastTransitionTime":"2026-01-27T07:53:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:53:08 crc kubenswrapper[4787]: I0127 07:53:08.805677 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:53:08 crc kubenswrapper[4787]: I0127 07:53:08.805740 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:53:08 crc kubenswrapper[4787]: I0127 07:53:08.805753 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:53:08 crc kubenswrapper[4787]: I0127 07:53:08.805775 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:53:08 crc kubenswrapper[4787]: I0127 07:53:08.805792 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:53:08Z","lastTransitionTime":"2026-01-27T07:53:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:53:08 crc kubenswrapper[4787]: I0127 07:53:08.909503 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:53:08 crc kubenswrapper[4787]: I0127 07:53:08.909569 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:53:08 crc kubenswrapper[4787]: I0127 07:53:08.909582 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:53:08 crc kubenswrapper[4787]: I0127 07:53:08.909602 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:53:08 crc kubenswrapper[4787]: I0127 07:53:08.909617 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:53:08Z","lastTransitionTime":"2026-01-27T07:53:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:53:09 crc kubenswrapper[4787]: I0127 07:53:09.013284 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:53:09 crc kubenswrapper[4787]: I0127 07:53:09.013355 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:53:09 crc kubenswrapper[4787]: I0127 07:53:09.013381 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:53:09 crc kubenswrapper[4787]: I0127 07:53:09.013417 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:53:09 crc kubenswrapper[4787]: I0127 07:53:09.013444 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:53:09Z","lastTransitionTime":"2026-01-27T07:53:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:53:09 crc kubenswrapper[4787]: I0127 07:53:09.077144 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 27 07:53:09 crc kubenswrapper[4787]: I0127 07:53:09.077203 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-vws75" Jan 27 07:53:09 crc kubenswrapper[4787]: E0127 07:53:09.077319 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 27 07:53:09 crc kubenswrapper[4787]: I0127 07:53:09.077430 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 27 07:53:09 crc kubenswrapper[4787]: E0127 07:53:09.077486 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-vws75" podUID="3969f21f-ab36-49b4-9a9c-02cf19e65ad0" Jan 27 07:53:09 crc kubenswrapper[4787]: I0127 07:53:09.077613 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 27 07:53:09 crc kubenswrapper[4787]: E0127 07:53:09.077765 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 27 07:53:09 crc kubenswrapper[4787]: E0127 07:53:09.077876 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 27 07:53:09 crc kubenswrapper[4787]: I0127 07:53:09.089792 4787 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-06 04:58:03.668382459 +0000 UTC Jan 27 07:53:09 crc kubenswrapper[4787]: I0127 07:53:09.116442 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:53:09 crc kubenswrapper[4787]: I0127 07:53:09.116577 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:53:09 crc kubenswrapper[4787]: I0127 07:53:09.116611 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:53:09 crc kubenswrapper[4787]: I0127 07:53:09.116641 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:53:09 crc kubenswrapper[4787]: I0127 07:53:09.116662 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:53:09Z","lastTransitionTime":"2026-01-27T07:53:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:53:09 crc kubenswrapper[4787]: I0127 07:53:09.220185 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:53:09 crc kubenswrapper[4787]: I0127 07:53:09.220242 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:53:09 crc kubenswrapper[4787]: I0127 07:53:09.220253 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:53:09 crc kubenswrapper[4787]: I0127 07:53:09.220275 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:53:09 crc kubenswrapper[4787]: I0127 07:53:09.220289 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:53:09Z","lastTransitionTime":"2026-01-27T07:53:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:53:09 crc kubenswrapper[4787]: I0127 07:53:09.323875 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:53:09 crc kubenswrapper[4787]: I0127 07:53:09.323951 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:53:09 crc kubenswrapper[4787]: I0127 07:53:09.323969 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:53:09 crc kubenswrapper[4787]: I0127 07:53:09.323998 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:53:09 crc kubenswrapper[4787]: I0127 07:53:09.324016 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:53:09Z","lastTransitionTime":"2026-01-27T07:53:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:53:09 crc kubenswrapper[4787]: I0127 07:53:09.427527 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:53:09 crc kubenswrapper[4787]: I0127 07:53:09.427611 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:53:09 crc kubenswrapper[4787]: I0127 07:53:09.427625 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:53:09 crc kubenswrapper[4787]: I0127 07:53:09.427671 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:53:09 crc kubenswrapper[4787]: I0127 07:53:09.427684 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:53:09Z","lastTransitionTime":"2026-01-27T07:53:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:53:09 crc kubenswrapper[4787]: I0127 07:53:09.531125 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:53:09 crc kubenswrapper[4787]: I0127 07:53:09.531192 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:53:09 crc kubenswrapper[4787]: I0127 07:53:09.531207 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:53:09 crc kubenswrapper[4787]: I0127 07:53:09.531230 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:53:09 crc kubenswrapper[4787]: I0127 07:53:09.531247 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:53:09Z","lastTransitionTime":"2026-01-27T07:53:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:53:09 crc kubenswrapper[4787]: I0127 07:53:09.634202 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:53:09 crc kubenswrapper[4787]: I0127 07:53:09.634235 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:53:09 crc kubenswrapper[4787]: I0127 07:53:09.634295 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:53:09 crc kubenswrapper[4787]: I0127 07:53:09.634311 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:53:09 crc kubenswrapper[4787]: I0127 07:53:09.634321 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:53:09Z","lastTransitionTime":"2026-01-27T07:53:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:53:09 crc kubenswrapper[4787]: I0127 07:53:09.737424 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:53:09 crc kubenswrapper[4787]: I0127 07:53:09.737497 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:53:09 crc kubenswrapper[4787]: I0127 07:53:09.737513 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:53:09 crc kubenswrapper[4787]: I0127 07:53:09.737538 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:53:09 crc kubenswrapper[4787]: I0127 07:53:09.737575 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:53:09Z","lastTransitionTime":"2026-01-27T07:53:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:53:09 crc kubenswrapper[4787]: I0127 07:53:09.840231 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:53:09 crc kubenswrapper[4787]: I0127 07:53:09.840278 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:53:09 crc kubenswrapper[4787]: I0127 07:53:09.840289 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:53:09 crc kubenswrapper[4787]: I0127 07:53:09.840304 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:53:09 crc kubenswrapper[4787]: I0127 07:53:09.840315 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:53:09Z","lastTransitionTime":"2026-01-27T07:53:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:53:09 crc kubenswrapper[4787]: I0127 07:53:09.943083 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:53:09 crc kubenswrapper[4787]: I0127 07:53:09.943172 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:53:09 crc kubenswrapper[4787]: I0127 07:53:09.943191 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:53:09 crc kubenswrapper[4787]: I0127 07:53:09.943223 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:53:09 crc kubenswrapper[4787]: I0127 07:53:09.943236 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:53:09Z","lastTransitionTime":"2026-01-27T07:53:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:53:10 crc kubenswrapper[4787]: I0127 07:53:10.045972 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:53:10 crc kubenswrapper[4787]: I0127 07:53:10.046251 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:53:10 crc kubenswrapper[4787]: I0127 07:53:10.046359 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:53:10 crc kubenswrapper[4787]: I0127 07:53:10.046429 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:53:10 crc kubenswrapper[4787]: I0127 07:53:10.046498 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:53:10Z","lastTransitionTime":"2026-01-27T07:53:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:53:10 crc kubenswrapper[4787]: I0127 07:53:10.090483 4787 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-26 09:41:49.259817597 +0000 UTC Jan 27 07:53:10 crc kubenswrapper[4787]: I0127 07:53:10.150347 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:53:10 crc kubenswrapper[4787]: I0127 07:53:10.150443 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:53:10 crc kubenswrapper[4787]: I0127 07:53:10.150464 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:53:10 crc kubenswrapper[4787]: I0127 07:53:10.150856 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:53:10 crc kubenswrapper[4787]: I0127 07:53:10.151102 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:53:10Z","lastTransitionTime":"2026-01-27T07:53:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:53:10 crc kubenswrapper[4787]: I0127 07:53:10.254488 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:53:10 crc kubenswrapper[4787]: I0127 07:53:10.254619 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:53:10 crc kubenswrapper[4787]: I0127 07:53:10.254653 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:53:10 crc kubenswrapper[4787]: I0127 07:53:10.254689 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:53:10 crc kubenswrapper[4787]: I0127 07:53:10.254713 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:53:10Z","lastTransitionTime":"2026-01-27T07:53:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:53:10 crc kubenswrapper[4787]: I0127 07:53:10.357870 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:53:10 crc kubenswrapper[4787]: I0127 07:53:10.357911 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:53:10 crc kubenswrapper[4787]: I0127 07:53:10.357921 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:53:10 crc kubenswrapper[4787]: I0127 07:53:10.357939 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:53:10 crc kubenswrapper[4787]: I0127 07:53:10.357949 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:53:10Z","lastTransitionTime":"2026-01-27T07:53:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:53:10 crc kubenswrapper[4787]: I0127 07:53:10.460394 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:53:10 crc kubenswrapper[4787]: I0127 07:53:10.460502 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:53:10 crc kubenswrapper[4787]: I0127 07:53:10.460527 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:53:10 crc kubenswrapper[4787]: I0127 07:53:10.460624 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:53:10 crc kubenswrapper[4787]: I0127 07:53:10.460651 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:53:10Z","lastTransitionTime":"2026-01-27T07:53:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:53:10 crc kubenswrapper[4787]: I0127 07:53:10.564607 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:53:10 crc kubenswrapper[4787]: I0127 07:53:10.564982 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:53:10 crc kubenswrapper[4787]: I0127 07:53:10.565066 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:53:10 crc kubenswrapper[4787]: I0127 07:53:10.565153 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:53:10 crc kubenswrapper[4787]: I0127 07:53:10.565211 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:53:10Z","lastTransitionTime":"2026-01-27T07:53:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:53:10 crc kubenswrapper[4787]: I0127 07:53:10.668720 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:53:10 crc kubenswrapper[4787]: I0127 07:53:10.668878 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:53:10 crc kubenswrapper[4787]: I0127 07:53:10.668913 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:53:10 crc kubenswrapper[4787]: I0127 07:53:10.668950 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:53:10 crc kubenswrapper[4787]: I0127 07:53:10.668973 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:53:10Z","lastTransitionTime":"2026-01-27T07:53:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:53:10 crc kubenswrapper[4787]: I0127 07:53:10.772079 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:53:10 crc kubenswrapper[4787]: I0127 07:53:10.772115 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:53:10 crc kubenswrapper[4787]: I0127 07:53:10.772125 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:53:10 crc kubenswrapper[4787]: I0127 07:53:10.772142 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:53:10 crc kubenswrapper[4787]: I0127 07:53:10.772151 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:53:10Z","lastTransitionTime":"2026-01-27T07:53:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:53:10 crc kubenswrapper[4787]: I0127 07:53:10.874886 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:53:10 crc kubenswrapper[4787]: I0127 07:53:10.874978 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:53:10 crc kubenswrapper[4787]: I0127 07:53:10.874997 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:53:10 crc kubenswrapper[4787]: I0127 07:53:10.875027 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:53:10 crc kubenswrapper[4787]: I0127 07:53:10.875047 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:53:10Z","lastTransitionTime":"2026-01-27T07:53:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:53:10 crc kubenswrapper[4787]: I0127 07:53:10.978989 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:53:10 crc kubenswrapper[4787]: I0127 07:53:10.979080 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:53:10 crc kubenswrapper[4787]: I0127 07:53:10.979105 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:53:10 crc kubenswrapper[4787]: I0127 07:53:10.979140 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:53:10 crc kubenswrapper[4787]: I0127 07:53:10.979169 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:53:10Z","lastTransitionTime":"2026-01-27T07:53:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:53:11 crc kubenswrapper[4787]: I0127 07:53:11.076224 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-vws75" Jan 27 07:53:11 crc kubenswrapper[4787]: I0127 07:53:11.076418 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 27 07:53:11 crc kubenswrapper[4787]: E0127 07:53:11.076564 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-vws75" podUID="3969f21f-ab36-49b4-9a9c-02cf19e65ad0" Jan 27 07:53:11 crc kubenswrapper[4787]: I0127 07:53:11.076601 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 27 07:53:11 crc kubenswrapper[4787]: E0127 07:53:11.076823 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 27 07:53:11 crc kubenswrapper[4787]: E0127 07:53:11.077583 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 27 07:53:11 crc kubenswrapper[4787]: I0127 07:53:11.078632 4787 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 27 07:53:11 crc kubenswrapper[4787]: E0127 07:53:11.079031 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 27 07:53:11 crc kubenswrapper[4787]: I0127 07:53:11.083082 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:53:11 crc kubenswrapper[4787]: I0127 07:53:11.083455 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:53:11 crc kubenswrapper[4787]: I0127 07:53:11.083684 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:53:11 crc kubenswrapper[4787]: I0127 07:53:11.083956 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:53:11 crc kubenswrapper[4787]: I0127 07:53:11.084267 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:53:11Z","lastTransitionTime":"2026-01-27T07:53:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:53:11 crc kubenswrapper[4787]: I0127 07:53:11.091160 4787 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-21 15:16:19.436378694 +0000 UTC Jan 27 07:53:11 crc kubenswrapper[4787]: I0127 07:53:11.187533 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:53:11 crc kubenswrapper[4787]: I0127 07:53:11.187582 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:53:11 crc kubenswrapper[4787]: I0127 07:53:11.187593 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:53:11 crc kubenswrapper[4787]: I0127 07:53:11.187607 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:53:11 crc kubenswrapper[4787]: I0127 07:53:11.187618 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:53:11Z","lastTransitionTime":"2026-01-27T07:53:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:53:11 crc kubenswrapper[4787]: I0127 07:53:11.289803 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:53:11 crc kubenswrapper[4787]: I0127 07:53:11.289845 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:53:11 crc kubenswrapper[4787]: I0127 07:53:11.289854 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:53:11 crc kubenswrapper[4787]: I0127 07:53:11.289868 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:53:11 crc kubenswrapper[4787]: I0127 07:53:11.289878 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:53:11Z","lastTransitionTime":"2026-01-27T07:53:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:53:11 crc kubenswrapper[4787]: I0127 07:53:11.392612 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:53:11 crc kubenswrapper[4787]: I0127 07:53:11.392979 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:53:11 crc kubenswrapper[4787]: I0127 07:53:11.393092 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:53:11 crc kubenswrapper[4787]: I0127 07:53:11.393269 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:53:11 crc kubenswrapper[4787]: I0127 07:53:11.393464 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:53:11Z","lastTransitionTime":"2026-01-27T07:53:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:53:11 crc kubenswrapper[4787]: I0127 07:53:11.497139 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:53:11 crc kubenswrapper[4787]: I0127 07:53:11.497422 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:53:11 crc kubenswrapper[4787]: I0127 07:53:11.497524 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:53:11 crc kubenswrapper[4787]: I0127 07:53:11.497639 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:53:11 crc kubenswrapper[4787]: I0127 07:53:11.497743 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:53:11Z","lastTransitionTime":"2026-01-27T07:53:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:53:11 crc kubenswrapper[4787]: I0127 07:53:11.601292 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:53:11 crc kubenswrapper[4787]: I0127 07:53:11.601372 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:53:11 crc kubenswrapper[4787]: I0127 07:53:11.601395 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:53:11 crc kubenswrapper[4787]: I0127 07:53:11.601430 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:53:11 crc kubenswrapper[4787]: I0127 07:53:11.601452 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:53:11Z","lastTransitionTime":"2026-01-27T07:53:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:53:11 crc kubenswrapper[4787]: I0127 07:53:11.703888 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:53:11 crc kubenswrapper[4787]: I0127 07:53:11.703979 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:53:11 crc kubenswrapper[4787]: I0127 07:53:11.704006 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:53:11 crc kubenswrapper[4787]: I0127 07:53:11.704040 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:53:11 crc kubenswrapper[4787]: I0127 07:53:11.704065 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:53:11Z","lastTransitionTime":"2026-01-27T07:53:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:53:11 crc kubenswrapper[4787]: I0127 07:53:11.807758 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:53:11 crc kubenswrapper[4787]: I0127 07:53:11.808211 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:53:11 crc kubenswrapper[4787]: I0127 07:53:11.808335 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:53:11 crc kubenswrapper[4787]: I0127 07:53:11.808448 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:53:11 crc kubenswrapper[4787]: I0127 07:53:11.808579 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:53:11Z","lastTransitionTime":"2026-01-27T07:53:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:53:11 crc kubenswrapper[4787]: I0127 07:53:11.911281 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:53:11 crc kubenswrapper[4787]: I0127 07:53:11.911321 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:53:11 crc kubenswrapper[4787]: I0127 07:53:11.911330 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:53:11 crc kubenswrapper[4787]: I0127 07:53:11.911346 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:53:11 crc kubenswrapper[4787]: I0127 07:53:11.911357 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:53:11Z","lastTransitionTime":"2026-01-27T07:53:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:53:12 crc kubenswrapper[4787]: I0127 07:53:12.015008 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:53:12 crc kubenswrapper[4787]: I0127 07:53:12.015398 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:53:12 crc kubenswrapper[4787]: I0127 07:53:12.015474 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:53:12 crc kubenswrapper[4787]: I0127 07:53:12.015602 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:53:12 crc kubenswrapper[4787]: I0127 07:53:12.015689 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:53:12Z","lastTransitionTime":"2026-01-27T07:53:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:53:12 crc kubenswrapper[4787]: I0127 07:53:12.092001 4787 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-26 14:25:23.100468862 +0000 UTC Jan 27 07:53:12 crc kubenswrapper[4787]: I0127 07:53:12.119114 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:53:12 crc kubenswrapper[4787]: I0127 07:53:12.119365 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:53:12 crc kubenswrapper[4787]: I0127 07:53:12.119524 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:53:12 crc kubenswrapper[4787]: I0127 07:53:12.119814 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:53:12 crc kubenswrapper[4787]: I0127 07:53:12.120038 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:53:12Z","lastTransitionTime":"2026-01-27T07:53:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:53:12 crc kubenswrapper[4787]: I0127 07:53:12.228527 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:53:12 crc kubenswrapper[4787]: I0127 07:53:12.228653 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:53:12 crc kubenswrapper[4787]: I0127 07:53:12.228676 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:53:12 crc kubenswrapper[4787]: I0127 07:53:12.228709 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:53:12 crc kubenswrapper[4787]: I0127 07:53:12.228737 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:53:12Z","lastTransitionTime":"2026-01-27T07:53:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:53:12 crc kubenswrapper[4787]: I0127 07:53:12.332247 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:53:12 crc kubenswrapper[4787]: I0127 07:53:12.332322 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:53:12 crc kubenswrapper[4787]: I0127 07:53:12.332344 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:53:12 crc kubenswrapper[4787]: I0127 07:53:12.332398 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:53:12 crc kubenswrapper[4787]: I0127 07:53:12.332418 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:53:12Z","lastTransitionTime":"2026-01-27T07:53:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:53:12 crc kubenswrapper[4787]: I0127 07:53:12.436026 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:53:12 crc kubenswrapper[4787]: I0127 07:53:12.436075 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:53:12 crc kubenswrapper[4787]: I0127 07:53:12.436089 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:53:12 crc kubenswrapper[4787]: I0127 07:53:12.436109 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:53:12 crc kubenswrapper[4787]: I0127 07:53:12.436126 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:53:12Z","lastTransitionTime":"2026-01-27T07:53:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:53:12 crc kubenswrapper[4787]: I0127 07:53:12.538473 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:53:12 crc kubenswrapper[4787]: I0127 07:53:12.538530 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:53:12 crc kubenswrapper[4787]: I0127 07:53:12.538582 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:53:12 crc kubenswrapper[4787]: I0127 07:53:12.538635 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:53:12 crc kubenswrapper[4787]: I0127 07:53:12.538662 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:53:12Z","lastTransitionTime":"2026-01-27T07:53:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:53:12 crc kubenswrapper[4787]: I0127 07:53:12.642151 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:53:12 crc kubenswrapper[4787]: I0127 07:53:12.642250 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:53:12 crc kubenswrapper[4787]: I0127 07:53:12.642288 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:53:12 crc kubenswrapper[4787]: I0127 07:53:12.642322 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:53:12 crc kubenswrapper[4787]: I0127 07:53:12.642344 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:53:12Z","lastTransitionTime":"2026-01-27T07:53:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:53:12 crc kubenswrapper[4787]: I0127 07:53:12.745332 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:53:12 crc kubenswrapper[4787]: I0127 07:53:12.745379 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:53:12 crc kubenswrapper[4787]: I0127 07:53:12.745387 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:53:12 crc kubenswrapper[4787]: I0127 07:53:12.745404 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:53:12 crc kubenswrapper[4787]: I0127 07:53:12.745414 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:53:12Z","lastTransitionTime":"2026-01-27T07:53:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:53:12 crc kubenswrapper[4787]: I0127 07:53:12.848741 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:53:12 crc kubenswrapper[4787]: I0127 07:53:12.848818 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:53:12 crc kubenswrapper[4787]: I0127 07:53:12.848837 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:53:12 crc kubenswrapper[4787]: I0127 07:53:12.848865 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:53:12 crc kubenswrapper[4787]: I0127 07:53:12.848885 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:53:12Z","lastTransitionTime":"2026-01-27T07:53:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:53:12 crc kubenswrapper[4787]: I0127 07:53:12.952781 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:53:12 crc kubenswrapper[4787]: I0127 07:53:12.952863 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:53:12 crc kubenswrapper[4787]: I0127 07:53:12.952899 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:53:12 crc kubenswrapper[4787]: I0127 07:53:12.952949 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:53:12 crc kubenswrapper[4787]: I0127 07:53:12.952975 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:53:12Z","lastTransitionTime":"2026-01-27T07:53:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:53:13 crc kubenswrapper[4787]: I0127 07:53:13.055725 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:53:13 crc kubenswrapper[4787]: I0127 07:53:13.055777 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:53:13 crc kubenswrapper[4787]: I0127 07:53:13.055785 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:53:13 crc kubenswrapper[4787]: I0127 07:53:13.055803 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:53:13 crc kubenswrapper[4787]: I0127 07:53:13.055815 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:53:13Z","lastTransitionTime":"2026-01-27T07:53:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:53:13 crc kubenswrapper[4787]: I0127 07:53:13.076130 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-vws75" Jan 27 07:53:13 crc kubenswrapper[4787]: I0127 07:53:13.076203 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 27 07:53:13 crc kubenswrapper[4787]: I0127 07:53:13.076283 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 27 07:53:13 crc kubenswrapper[4787]: E0127 07:53:13.076451 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-vws75" podUID="3969f21f-ab36-49b4-9a9c-02cf19e65ad0" Jan 27 07:53:13 crc kubenswrapper[4787]: I0127 07:53:13.076497 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 27 07:53:13 crc kubenswrapper[4787]: E0127 07:53:13.076545 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 27 07:53:13 crc kubenswrapper[4787]: E0127 07:53:13.076818 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 27 07:53:13 crc kubenswrapper[4787]: E0127 07:53:13.076908 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 27 07:53:13 crc kubenswrapper[4787]: I0127 07:53:13.078061 4787 scope.go:117] "RemoveContainer" containerID="6d3ec81938807879c988aabe13c7bf18add843b4295e6a4228405f02b47fd637" Jan 27 07:53:13 crc kubenswrapper[4787]: E0127 07:53:13.078218 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-7642m_openshift-ovn-kubernetes(fa44405c-042c-485a-ab6c-912dcd377751)\"" pod="openshift-ovn-kubernetes/ovnkube-node-7642m" podUID="fa44405c-042c-485a-ab6c-912dcd377751" Jan 27 07:53:13 crc kubenswrapper[4787]: I0127 07:53:13.092671 4787 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-01 18:12:36.090549499 +0000 UTC Jan 27 07:53:13 crc kubenswrapper[4787]: I0127 07:53:13.158788 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:53:13 crc kubenswrapper[4787]: I0127 07:53:13.158824 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:53:13 crc kubenswrapper[4787]: I0127 07:53:13.158833 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:53:13 crc kubenswrapper[4787]: I0127 07:53:13.158850 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:53:13 crc kubenswrapper[4787]: I0127 07:53:13.158863 4787 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:53:13Z","lastTransitionTime":"2026-01-27T07:53:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:53:13 crc kubenswrapper[4787]: I0127 07:53:13.262129 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:53:13 crc kubenswrapper[4787]: I0127 07:53:13.262188 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:53:13 crc kubenswrapper[4787]: I0127 07:53:13.262201 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:53:13 crc kubenswrapper[4787]: I0127 07:53:13.262229 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:53:13 crc kubenswrapper[4787]: I0127 07:53:13.262246 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:53:13Z","lastTransitionTime":"2026-01-27T07:53:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:53:13 crc kubenswrapper[4787]: I0127 07:53:13.290027 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:53:13 crc kubenswrapper[4787]: I0127 07:53:13.290102 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:53:13 crc kubenswrapper[4787]: I0127 07:53:13.290114 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:53:13 crc kubenswrapper[4787]: I0127 07:53:13.290133 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:53:13 crc kubenswrapper[4787]: I0127 07:53:13.290147 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:53:13Z","lastTransitionTime":"2026-01-27T07:53:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:53:13 crc kubenswrapper[4787]: E0127 07:53:13.306434 4787 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T07:53:13Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T07:53:13Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T07:53:13Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T07:53:13Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T07:53:13Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T07:53:13Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T07:53:13Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T07:53:13Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"55ded10c-ad4c-443d-a571-c7a8f4a50b03\\\",\\\"systemUUID\\\":\\\"553a2493-323d-43ed-bfcf-44c5a8746c19\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:53:13Z is after 2025-08-24T17:21:41Z" Jan 27 07:53:13 crc kubenswrapper[4787]: I0127 07:53:13.311988 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:53:13 crc kubenswrapper[4787]: I0127 07:53:13.312262 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 27 07:53:13 crc kubenswrapper[4787]: I0127 07:53:13.312464 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:53:13 crc kubenswrapper[4787]: I0127 07:53:13.312684 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:53:13 crc kubenswrapper[4787]: I0127 07:53:13.312845 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:53:13Z","lastTransitionTime":"2026-01-27T07:53:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:53:13 crc kubenswrapper[4787]: E0127 07:53:13.331845 4787 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T07:53:13Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T07:53:13Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T07:53:13Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T07:53:13Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T07:53:13Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T07:53:13Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T07:53:13Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T07:53:13Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"55ded10c-ad4c-443d-a571-c7a8f4a50b03\\\",\\\"systemUUID\\\":\\\"553a2493-323d-43ed-bfcf-44c5a8746c19\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:53:13Z is after 2025-08-24T17:21:41Z" Jan 27 07:53:13 crc kubenswrapper[4787]: I0127 07:53:13.337234 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:53:13 crc kubenswrapper[4787]: I0127 07:53:13.337577 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 27 07:53:13 crc kubenswrapper[4787]: I0127 07:53:13.337708 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:53:13 crc kubenswrapper[4787]: I0127 07:53:13.337815 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:53:13 crc kubenswrapper[4787]: I0127 07:53:13.337894 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:53:13Z","lastTransitionTime":"2026-01-27T07:53:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:53:13 crc kubenswrapper[4787]: E0127 07:53:13.354444 4787 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T07:53:13Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T07:53:13Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T07:53:13Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T07:53:13Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T07:53:13Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T07:53:13Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T07:53:13Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T07:53:13Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"55ded10c-ad4c-443d-a571-c7a8f4a50b03\\\",\\\"systemUUID\\\":\\\"553a2493-323d-43ed-bfcf-44c5a8746c19\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:53:13Z is after 2025-08-24T17:21:41Z" Jan 27 07:53:13 crc kubenswrapper[4787]: I0127 07:53:13.358603 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:53:13 crc kubenswrapper[4787]: I0127 07:53:13.358728 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 27 07:53:13 crc kubenswrapper[4787]: I0127 07:53:13.358811 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:53:13 crc kubenswrapper[4787]: I0127 07:53:13.358892 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:53:13 crc kubenswrapper[4787]: I0127 07:53:13.358955 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:53:13Z","lastTransitionTime":"2026-01-27T07:53:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:53:13 crc kubenswrapper[4787]: E0127 07:53:13.371708 4787 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T07:53:13Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T07:53:13Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T07:53:13Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T07:53:13Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T07:53:13Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T07:53:13Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T07:53:13Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T07:53:13Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"55ded10c-ad4c-443d-a571-c7a8f4a50b03\\\",\\\"systemUUID\\\":\\\"553a2493-323d-43ed-bfcf-44c5a8746c19\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:53:13Z is after 2025-08-24T17:21:41Z" Jan 27 07:53:13 crc kubenswrapper[4787]: I0127 07:53:13.377260 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:53:13 crc kubenswrapper[4787]: I0127 07:53:13.377321 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 27 07:53:13 crc kubenswrapper[4787]: I0127 07:53:13.377340 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:53:13 crc kubenswrapper[4787]: I0127 07:53:13.377368 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:53:13 crc kubenswrapper[4787]: I0127 07:53:13.377388 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:53:13Z","lastTransitionTime":"2026-01-27T07:53:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:53:13 crc kubenswrapper[4787]: E0127 07:53:13.394072 4787 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T07:53:13Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T07:53:13Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T07:53:13Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T07:53:13Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T07:53:13Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T07:53:13Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T07:53:13Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T07:53:13Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"55ded10c-ad4c-443d-a571-c7a8f4a50b03\\\",\\\"systemUUID\\\":\\\"553a2493-323d-43ed-bfcf-44c5a8746c19\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T07:53:13Z is after 2025-08-24T17:21:41Z" Jan 27 07:53:13 crc kubenswrapper[4787]: E0127 07:53:13.394589 4787 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Jan 27 07:53:13 crc kubenswrapper[4787]: I0127 07:53:13.397488 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Jan 27 07:53:13 crc kubenswrapper[4787]: I0127 07:53:13.397686 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:53:13 crc kubenswrapper[4787]: I0127 07:53:13.397915 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:53:13 crc kubenswrapper[4787]: I0127 07:53:13.398159 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:53:13 crc kubenswrapper[4787]: I0127 07:53:13.398373 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:53:13Z","lastTransitionTime":"2026-01-27T07:53:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:53:13 crc kubenswrapper[4787]: I0127 07:53:13.501544 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:53:13 crc kubenswrapper[4787]: I0127 07:53:13.501940 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:53:13 crc kubenswrapper[4787]: I0127 07:53:13.502033 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:53:13 crc kubenswrapper[4787]: I0127 07:53:13.502110 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:53:13 crc kubenswrapper[4787]: I0127 07:53:13.502173 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:53:13Z","lastTransitionTime":"2026-01-27T07:53:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:53:13 crc kubenswrapper[4787]: I0127 07:53:13.604670 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:53:13 crc kubenswrapper[4787]: I0127 07:53:13.604723 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:53:13 crc kubenswrapper[4787]: I0127 07:53:13.604735 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:53:13 crc kubenswrapper[4787]: I0127 07:53:13.604754 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:53:13 crc kubenswrapper[4787]: I0127 07:53:13.604765 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:53:13Z","lastTransitionTime":"2026-01-27T07:53:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:53:13 crc kubenswrapper[4787]: I0127 07:53:13.707277 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:53:13 crc kubenswrapper[4787]: I0127 07:53:13.707325 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:53:13 crc kubenswrapper[4787]: I0127 07:53:13.707334 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:53:13 crc kubenswrapper[4787]: I0127 07:53:13.707351 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:53:13 crc kubenswrapper[4787]: I0127 07:53:13.707362 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:53:13Z","lastTransitionTime":"2026-01-27T07:53:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:53:13 crc kubenswrapper[4787]: I0127 07:53:13.810513 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:53:13 crc kubenswrapper[4787]: I0127 07:53:13.810590 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:53:13 crc kubenswrapper[4787]: I0127 07:53:13.810605 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:53:13 crc kubenswrapper[4787]: I0127 07:53:13.810630 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:53:13 crc kubenswrapper[4787]: I0127 07:53:13.810648 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:53:13Z","lastTransitionTime":"2026-01-27T07:53:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:53:13 crc kubenswrapper[4787]: I0127 07:53:13.913103 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:53:13 crc kubenswrapper[4787]: I0127 07:53:13.913155 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:53:13 crc kubenswrapper[4787]: I0127 07:53:13.913167 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:53:13 crc kubenswrapper[4787]: I0127 07:53:13.913185 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:53:13 crc kubenswrapper[4787]: I0127 07:53:13.913196 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:53:13Z","lastTransitionTime":"2026-01-27T07:53:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:53:14 crc kubenswrapper[4787]: I0127 07:53:14.015952 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:53:14 crc kubenswrapper[4787]: I0127 07:53:14.016004 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:53:14 crc kubenswrapper[4787]: I0127 07:53:14.016017 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:53:14 crc kubenswrapper[4787]: I0127 07:53:14.016035 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:53:14 crc kubenswrapper[4787]: I0127 07:53:14.016049 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:53:14Z","lastTransitionTime":"2026-01-27T07:53:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:53:14 crc kubenswrapper[4787]: I0127 07:53:14.093196 4787 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-18 10:30:05.548127765 +0000 UTC Jan 27 07:53:14 crc kubenswrapper[4787]: I0127 07:53:14.119135 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:53:14 crc kubenswrapper[4787]: I0127 07:53:14.119181 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:53:14 crc kubenswrapper[4787]: I0127 07:53:14.119192 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:53:14 crc kubenswrapper[4787]: I0127 07:53:14.119208 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:53:14 crc kubenswrapper[4787]: I0127 07:53:14.119220 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:53:14Z","lastTransitionTime":"2026-01-27T07:53:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:53:14 crc kubenswrapper[4787]: I0127 07:53:14.223230 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:53:14 crc kubenswrapper[4787]: I0127 07:53:14.223611 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:53:14 crc kubenswrapper[4787]: I0127 07:53:14.223712 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:53:14 crc kubenswrapper[4787]: I0127 07:53:14.223826 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:53:14 crc kubenswrapper[4787]: I0127 07:53:14.223915 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:53:14Z","lastTransitionTime":"2026-01-27T07:53:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:53:14 crc kubenswrapper[4787]: I0127 07:53:14.327466 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:53:14 crc kubenswrapper[4787]: I0127 07:53:14.327528 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:53:14 crc kubenswrapper[4787]: I0127 07:53:14.327568 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:53:14 crc kubenswrapper[4787]: I0127 07:53:14.327593 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:53:14 crc kubenswrapper[4787]: I0127 07:53:14.327613 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:53:14Z","lastTransitionTime":"2026-01-27T07:53:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:53:14 crc kubenswrapper[4787]: I0127 07:53:14.430966 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:53:14 crc kubenswrapper[4787]: I0127 07:53:14.431025 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:53:14 crc kubenswrapper[4787]: I0127 07:53:14.431039 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:53:14 crc kubenswrapper[4787]: I0127 07:53:14.431064 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:53:14 crc kubenswrapper[4787]: I0127 07:53:14.431080 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:53:14Z","lastTransitionTime":"2026-01-27T07:53:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:53:14 crc kubenswrapper[4787]: I0127 07:53:14.534776 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:53:14 crc kubenswrapper[4787]: I0127 07:53:14.534846 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:53:14 crc kubenswrapper[4787]: I0127 07:53:14.534862 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:53:14 crc kubenswrapper[4787]: I0127 07:53:14.534886 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:53:14 crc kubenswrapper[4787]: I0127 07:53:14.534902 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:53:14Z","lastTransitionTime":"2026-01-27T07:53:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:53:14 crc kubenswrapper[4787]: I0127 07:53:14.637466 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:53:14 crc kubenswrapper[4787]: I0127 07:53:14.637540 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:53:14 crc kubenswrapper[4787]: I0127 07:53:14.637591 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:53:14 crc kubenswrapper[4787]: I0127 07:53:14.637618 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:53:14 crc kubenswrapper[4787]: I0127 07:53:14.637638 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:53:14Z","lastTransitionTime":"2026-01-27T07:53:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:53:14 crc kubenswrapper[4787]: I0127 07:53:14.740474 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:53:14 crc kubenswrapper[4787]: I0127 07:53:14.740544 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:53:14 crc kubenswrapper[4787]: I0127 07:53:14.740597 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:53:14 crc kubenswrapper[4787]: I0127 07:53:14.740623 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:53:14 crc kubenswrapper[4787]: I0127 07:53:14.740642 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:53:14Z","lastTransitionTime":"2026-01-27T07:53:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:53:14 crc kubenswrapper[4787]: I0127 07:53:14.842982 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:53:14 crc kubenswrapper[4787]: I0127 07:53:14.843021 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:53:14 crc kubenswrapper[4787]: I0127 07:53:14.843030 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:53:14 crc kubenswrapper[4787]: I0127 07:53:14.843045 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:53:14 crc kubenswrapper[4787]: I0127 07:53:14.843056 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:53:14Z","lastTransitionTime":"2026-01-27T07:53:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:53:14 crc kubenswrapper[4787]: I0127 07:53:14.945643 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:53:14 crc kubenswrapper[4787]: I0127 07:53:14.945696 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:53:14 crc kubenswrapper[4787]: I0127 07:53:14.945708 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:53:14 crc kubenswrapper[4787]: I0127 07:53:14.945727 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:53:14 crc kubenswrapper[4787]: I0127 07:53:14.945742 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:53:14Z","lastTransitionTime":"2026-01-27T07:53:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:53:15 crc kubenswrapper[4787]: I0127 07:53:15.049692 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:53:15 crc kubenswrapper[4787]: I0127 07:53:15.049750 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:53:15 crc kubenswrapper[4787]: I0127 07:53:15.049764 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:53:15 crc kubenswrapper[4787]: I0127 07:53:15.049789 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:53:15 crc kubenswrapper[4787]: I0127 07:53:15.049806 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:53:15Z","lastTransitionTime":"2026-01-27T07:53:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:53:15 crc kubenswrapper[4787]: I0127 07:53:15.076544 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 27 07:53:15 crc kubenswrapper[4787]: I0127 07:53:15.076599 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 27 07:53:15 crc kubenswrapper[4787]: I0127 07:53:15.076760 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 27 07:53:15 crc kubenswrapper[4787]: E0127 07:53:15.076952 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 27 07:53:15 crc kubenswrapper[4787]: I0127 07:53:15.077012 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-vws75" Jan 27 07:53:15 crc kubenswrapper[4787]: E0127 07:53:15.077364 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 27 07:53:15 crc kubenswrapper[4787]: E0127 07:53:15.077236 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 27 07:53:15 crc kubenswrapper[4787]: E0127 07:53:15.077487 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-vws75" podUID="3969f21f-ab36-49b4-9a9c-02cf19e65ad0" Jan 27 07:53:15 crc kubenswrapper[4787]: I0127 07:53:15.094163 4787 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-27 17:14:52.050697245 +0000 UTC Jan 27 07:53:15 crc kubenswrapper[4787]: I0127 07:53:15.118151 4787 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/node-resolver-fghc7" podStartSLOduration=77.118127865 podStartE2EDuration="1m17.118127865s" podCreationTimestamp="2026-01-27 07:51:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-27 07:53:15.107529942 +0000 UTC m=+100.759885434" watchObservedRunningTime="2026-01-27 07:53:15.118127865 +0000 UTC m=+100.770483357" Jan 27 07:53:15 crc kubenswrapper[4787]: I0127 07:53:15.138926 4787 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podStartSLOduration=81.138889633 podStartE2EDuration="1m21.138889633s" podCreationTimestamp="2026-01-27 07:51:54 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-27 07:53:15.138800352 +0000 UTC m=+100.791155844" watchObservedRunningTime="2026-01-27 07:53:15.138889633 +0000 UTC m=+100.791245135" Jan 27 07:53:15 crc kubenswrapper[4787]: I0127 07:53:15.139349 4787 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/node-ca-rqwfc" podStartSLOduration=77.139338152 podStartE2EDuration="1m17.139338152s" podCreationTimestamp="2026-01-27 07:51:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-27 07:53:15.118525983 +0000 UTC m=+100.770881515" watchObservedRunningTime="2026-01-27 07:53:15.139338152 +0000 UTC m=+100.791693654" Jan 27 07:53:15 crc kubenswrapper[4787]: I0127 07:53:15.153064 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:53:15 crc kubenswrapper[4787]: I0127 07:53:15.153100 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:53:15 crc kubenswrapper[4787]: I0127 07:53:15.153110 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:53:15 crc kubenswrapper[4787]: I0127 07:53:15.153124 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:53:15 crc kubenswrapper[4787]: I0127 07:53:15.153133 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:53:15Z","lastTransitionTime":"2026-01-27T07:53:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:53:15 crc kubenswrapper[4787]: I0127 07:53:15.181469 4787 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-crc" podStartSLOduration=80.18142693 podStartE2EDuration="1m20.18142693s" podCreationTimestamp="2026-01-27 07:51:55 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-27 07:53:15.164447948 +0000 UTC m=+100.816803440" watchObservedRunningTime="2026-01-27 07:53:15.18142693 +0000 UTC m=+100.833782472" Jan 27 07:53:15 crc kubenswrapper[4787]: I0127 07:53:15.220305 4787 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-rqjpz" podStartSLOduration=76.220263692 podStartE2EDuration="1m16.220263692s" podCreationTimestamp="2026-01-27 07:51:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-27 07:53:15.219117739 +0000 UTC m=+100.871473241" watchObservedRunningTime="2026-01-27 07:53:15.220263692 +0000 UTC m=+100.872619224" Jan 27 07:53:15 crc kubenswrapper[4787]: I0127 07:53:15.247340 4787 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-additional-cni-plugins-fqb6r" podStartSLOduration=76.245261526 podStartE2EDuration="1m16.245261526s" podCreationTimestamp="2026-01-27 07:51:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-27 07:53:15.242868348 +0000 UTC m=+100.895223850" watchObservedRunningTime="2026-01-27 07:53:15.245261526 +0000 UTC m=+100.897617018" Jan 27 07:53:15 crc kubenswrapper[4787]: I0127 07:53:15.262980 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:53:15 crc kubenswrapper[4787]: I0127 07:53:15.263055 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:53:15 crc kubenswrapper[4787]: I0127 07:53:15.263069 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:53:15 crc kubenswrapper[4787]: I0127 07:53:15.263094 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:53:15 crc kubenswrapper[4787]: I0127 07:53:15.263113 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:53:15Z","lastTransitionTime":"2026-01-27T07:53:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:53:15 crc kubenswrapper[4787]: I0127 07:53:15.292006 4787 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd/etcd-crc" podStartSLOduration=78.291979177 podStartE2EDuration="1m18.291979177s" podCreationTimestamp="2026-01-27 07:51:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-27 07:53:15.274023336 +0000 UTC m=+100.926378838" watchObservedRunningTime="2026-01-27 07:53:15.291979177 +0000 UTC m=+100.944334679" Jan 27 07:53:15 crc kubenswrapper[4787]: I0127 07:53:15.355187 4787 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" podStartSLOduration=15.355153649 podStartE2EDuration="15.355153649s" podCreationTimestamp="2026-01-27 07:53:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-27 07:53:15.328958332 +0000 UTC m=+100.981313824" watchObservedRunningTime="2026-01-27 07:53:15.355153649 +0000 UTC m=+101.007509141" Jan 27 07:53:15 crc kubenswrapper[4787]: I0127 07:53:15.365486 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:53:15 crc kubenswrapper[4787]: I0127 07:53:15.365564 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:53:15 crc kubenswrapper[4787]: I0127 07:53:15.365580 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:53:15 crc kubenswrapper[4787]: I0127 07:53:15.365609 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:53:15 crc kubenswrapper[4787]: I0127 07:53:15.365625 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:53:15Z","lastTransitionTime":"2026-01-27T07:53:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:53:15 crc kubenswrapper[4787]: I0127 07:53:15.370200 4787 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-vhss5" podStartSLOduration=76.370173022 podStartE2EDuration="1m16.370173022s" podCreationTimestamp="2026-01-27 07:51:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-27 07:53:15.369036259 +0000 UTC m=+101.021391761" watchObservedRunningTime="2026-01-27 07:53:15.370173022 +0000 UTC m=+101.022528514" Jan 27 07:53:15 crc kubenswrapper[4787]: I0127 07:53:15.383987 4787 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-daemon-q4fh5" podStartSLOduration=76.38396656 podStartE2EDuration="1m16.38396656s" podCreationTimestamp="2026-01-27 07:51:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-27 07:53:15.382263426 +0000 UTC m=+101.034618938" watchObservedRunningTime="2026-01-27 07:53:15.38396656 +0000 UTC m=+101.036322052" Jan 27 07:53:15 crc kubenswrapper[4787]: I0127 07:53:15.397000 4787 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" podStartSLOduration=48.396968041 podStartE2EDuration="48.396968041s" podCreationTimestamp="2026-01-27 07:52:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-27 07:53:15.396260727 +0000 UTC m=+101.048616219" watchObservedRunningTime="2026-01-27 07:53:15.396968041 +0000 UTC m=+101.049323553" Jan 27 07:53:15 crc kubenswrapper[4787]: I0127 07:53:15.467463 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:53:15 crc kubenswrapper[4787]: I0127 07:53:15.467514 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:53:15 crc kubenswrapper[4787]: I0127 07:53:15.467529 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:53:15 crc kubenswrapper[4787]: I0127 07:53:15.467562 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:53:15 crc kubenswrapper[4787]: I0127 07:53:15.467576 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:53:15Z","lastTransitionTime":"2026-01-27T07:53:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:53:15 crc kubenswrapper[4787]: I0127 07:53:15.570879 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:53:15 crc kubenswrapper[4787]: I0127 07:53:15.570953 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:53:15 crc kubenswrapper[4787]: I0127 07:53:15.570967 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:53:15 crc kubenswrapper[4787]: I0127 07:53:15.570987 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:53:15 crc kubenswrapper[4787]: I0127 07:53:15.571001 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:53:15Z","lastTransitionTime":"2026-01-27T07:53:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:53:15 crc kubenswrapper[4787]: I0127 07:53:15.673494 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:53:15 crc kubenswrapper[4787]: I0127 07:53:15.673613 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:53:15 crc kubenswrapper[4787]: I0127 07:53:15.673629 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:53:15 crc kubenswrapper[4787]: I0127 07:53:15.673647 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:53:15 crc kubenswrapper[4787]: I0127 07:53:15.673657 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:53:15Z","lastTransitionTime":"2026-01-27T07:53:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:53:15 crc kubenswrapper[4787]: I0127 07:53:15.775890 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:53:15 crc kubenswrapper[4787]: I0127 07:53:15.775954 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:53:15 crc kubenswrapper[4787]: I0127 07:53:15.775968 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:53:15 crc kubenswrapper[4787]: I0127 07:53:15.775985 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:53:15 crc kubenswrapper[4787]: I0127 07:53:15.775998 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:53:15Z","lastTransitionTime":"2026-01-27T07:53:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:53:15 crc kubenswrapper[4787]: I0127 07:53:15.878847 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:53:15 crc kubenswrapper[4787]: I0127 07:53:15.878898 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:53:15 crc kubenswrapper[4787]: I0127 07:53:15.878908 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:53:15 crc kubenswrapper[4787]: I0127 07:53:15.878929 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:53:15 crc kubenswrapper[4787]: I0127 07:53:15.878942 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:53:15Z","lastTransitionTime":"2026-01-27T07:53:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:53:15 crc kubenswrapper[4787]: I0127 07:53:15.982125 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:53:15 crc kubenswrapper[4787]: I0127 07:53:15.982177 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:53:15 crc kubenswrapper[4787]: I0127 07:53:15.982187 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:53:15 crc kubenswrapper[4787]: I0127 07:53:15.982205 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:53:15 crc kubenswrapper[4787]: I0127 07:53:15.982216 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:53:15Z","lastTransitionTime":"2026-01-27T07:53:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:53:16 crc kubenswrapper[4787]: I0127 07:53:16.085618 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:53:16 crc kubenswrapper[4787]: I0127 07:53:16.085690 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:53:16 crc kubenswrapper[4787]: I0127 07:53:16.085720 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:53:16 crc kubenswrapper[4787]: I0127 07:53:16.085782 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:53:16 crc kubenswrapper[4787]: I0127 07:53:16.085807 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:53:16Z","lastTransitionTime":"2026-01-27T07:53:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:53:16 crc kubenswrapper[4787]: I0127 07:53:16.095188 4787 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-29 23:09:21.540851714 +0000 UTC Jan 27 07:53:16 crc kubenswrapper[4787]: I0127 07:53:16.188829 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:53:16 crc kubenswrapper[4787]: I0127 07:53:16.188895 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:53:16 crc kubenswrapper[4787]: I0127 07:53:16.188914 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:53:16 crc kubenswrapper[4787]: I0127 07:53:16.188943 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:53:16 crc kubenswrapper[4787]: I0127 07:53:16.188963 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:53:16Z","lastTransitionTime":"2026-01-27T07:53:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:53:16 crc kubenswrapper[4787]: I0127 07:53:16.292979 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:53:16 crc kubenswrapper[4787]: I0127 07:53:16.293111 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:53:16 crc kubenswrapper[4787]: I0127 07:53:16.293136 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:53:16 crc kubenswrapper[4787]: I0127 07:53:16.293168 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:53:16 crc kubenswrapper[4787]: I0127 07:53:16.293193 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:53:16Z","lastTransitionTime":"2026-01-27T07:53:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:53:16 crc kubenswrapper[4787]: I0127 07:53:16.396353 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:53:16 crc kubenswrapper[4787]: I0127 07:53:16.396394 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:53:16 crc kubenswrapper[4787]: I0127 07:53:16.396404 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:53:16 crc kubenswrapper[4787]: I0127 07:53:16.396423 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:53:16 crc kubenswrapper[4787]: I0127 07:53:16.396434 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:53:16Z","lastTransitionTime":"2026-01-27T07:53:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:53:16 crc kubenswrapper[4787]: I0127 07:53:16.500667 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:53:16 crc kubenswrapper[4787]: I0127 07:53:16.500715 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:53:16 crc kubenswrapper[4787]: I0127 07:53:16.500727 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:53:16 crc kubenswrapper[4787]: I0127 07:53:16.500748 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:53:16 crc kubenswrapper[4787]: I0127 07:53:16.500760 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:53:16Z","lastTransitionTime":"2026-01-27T07:53:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:53:16 crc kubenswrapper[4787]: I0127 07:53:16.603536 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:53:16 crc kubenswrapper[4787]: I0127 07:53:16.603648 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:53:16 crc kubenswrapper[4787]: I0127 07:53:16.603676 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:53:16 crc kubenswrapper[4787]: I0127 07:53:16.603709 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:53:16 crc kubenswrapper[4787]: I0127 07:53:16.603733 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:53:16Z","lastTransitionTime":"2026-01-27T07:53:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:53:16 crc kubenswrapper[4787]: I0127 07:53:16.707687 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:53:16 crc kubenswrapper[4787]: I0127 07:53:16.707753 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:53:16 crc kubenswrapper[4787]: I0127 07:53:16.707771 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:53:16 crc kubenswrapper[4787]: I0127 07:53:16.707796 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:53:16 crc kubenswrapper[4787]: I0127 07:53:16.707815 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:53:16Z","lastTransitionTime":"2026-01-27T07:53:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:53:16 crc kubenswrapper[4787]: I0127 07:53:16.811686 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:53:16 crc kubenswrapper[4787]: I0127 07:53:16.811745 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:53:16 crc kubenswrapper[4787]: I0127 07:53:16.811763 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:53:16 crc kubenswrapper[4787]: I0127 07:53:16.811791 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:53:16 crc kubenswrapper[4787]: I0127 07:53:16.811811 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:53:16Z","lastTransitionTime":"2026-01-27T07:53:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:53:16 crc kubenswrapper[4787]: I0127 07:53:16.914871 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:53:16 crc kubenswrapper[4787]: I0127 07:53:16.914925 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:53:16 crc kubenswrapper[4787]: I0127 07:53:16.914937 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:53:16 crc kubenswrapper[4787]: I0127 07:53:16.914956 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:53:16 crc kubenswrapper[4787]: I0127 07:53:16.914969 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:53:16Z","lastTransitionTime":"2026-01-27T07:53:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:53:17 crc kubenswrapper[4787]: I0127 07:53:17.017702 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:53:17 crc kubenswrapper[4787]: I0127 07:53:17.017762 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:53:17 crc kubenswrapper[4787]: I0127 07:53:17.017773 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:53:17 crc kubenswrapper[4787]: I0127 07:53:17.017795 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:53:17 crc kubenswrapper[4787]: I0127 07:53:17.017813 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:53:17Z","lastTransitionTime":"2026-01-27T07:53:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:53:17 crc kubenswrapper[4787]: I0127 07:53:17.076391 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 27 07:53:17 crc kubenswrapper[4787]: I0127 07:53:17.076570 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-vws75" Jan 27 07:53:17 crc kubenswrapper[4787]: E0127 07:53:17.076858 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 27 07:53:17 crc kubenswrapper[4787]: E0127 07:53:17.076976 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-vws75" podUID="3969f21f-ab36-49b4-9a9c-02cf19e65ad0" Jan 27 07:53:17 crc kubenswrapper[4787]: I0127 07:53:17.076604 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 27 07:53:17 crc kubenswrapper[4787]: I0127 07:53:17.077086 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 27 07:53:17 crc kubenswrapper[4787]: E0127 07:53:17.077330 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 27 07:53:17 crc kubenswrapper[4787]: E0127 07:53:17.077596 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 27 07:53:17 crc kubenswrapper[4787]: I0127 07:53:17.095435 4787 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-08 02:53:21.509390053 +0000 UTC Jan 27 07:53:17 crc kubenswrapper[4787]: I0127 07:53:17.121305 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:53:17 crc kubenswrapper[4787]: I0127 07:53:17.121370 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:53:17 crc kubenswrapper[4787]: I0127 07:53:17.121397 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:53:17 crc kubenswrapper[4787]: I0127 07:53:17.121430 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:53:17 crc kubenswrapper[4787]: I0127 07:53:17.121451 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:53:17Z","lastTransitionTime":"2026-01-27T07:53:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:53:17 crc kubenswrapper[4787]: I0127 07:53:17.224789 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:53:17 crc kubenswrapper[4787]: I0127 07:53:17.224828 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:53:17 crc kubenswrapper[4787]: I0127 07:53:17.224837 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:53:17 crc kubenswrapper[4787]: I0127 07:53:17.224855 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:53:17 crc kubenswrapper[4787]: I0127 07:53:17.224876 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:53:17Z","lastTransitionTime":"2026-01-27T07:53:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:53:17 crc kubenswrapper[4787]: I0127 07:53:17.328517 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:53:17 crc kubenswrapper[4787]: I0127 07:53:17.328581 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:53:17 crc kubenswrapper[4787]: I0127 07:53:17.328592 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:53:17 crc kubenswrapper[4787]: I0127 07:53:17.328610 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:53:17 crc kubenswrapper[4787]: I0127 07:53:17.328624 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:53:17Z","lastTransitionTime":"2026-01-27T07:53:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:53:17 crc kubenswrapper[4787]: I0127 07:53:17.433145 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:53:17 crc kubenswrapper[4787]: I0127 07:53:17.433204 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:53:17 crc kubenswrapper[4787]: I0127 07:53:17.433223 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:53:17 crc kubenswrapper[4787]: I0127 07:53:17.433251 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:53:17 crc kubenswrapper[4787]: I0127 07:53:17.433270 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:53:17Z","lastTransitionTime":"2026-01-27T07:53:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:53:17 crc kubenswrapper[4787]: I0127 07:53:17.536202 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:53:17 crc kubenswrapper[4787]: I0127 07:53:17.536243 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:53:17 crc kubenswrapper[4787]: I0127 07:53:17.536253 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:53:17 crc kubenswrapper[4787]: I0127 07:53:17.536272 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:53:17 crc kubenswrapper[4787]: I0127 07:53:17.536285 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:53:17Z","lastTransitionTime":"2026-01-27T07:53:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:53:17 crc kubenswrapper[4787]: I0127 07:53:17.640986 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:53:17 crc kubenswrapper[4787]: I0127 07:53:17.641078 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:53:17 crc kubenswrapper[4787]: I0127 07:53:17.641105 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:53:17 crc kubenswrapper[4787]: I0127 07:53:17.641210 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:53:17 crc kubenswrapper[4787]: I0127 07:53:17.641236 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:53:17Z","lastTransitionTime":"2026-01-27T07:53:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:53:17 crc kubenswrapper[4787]: I0127 07:53:17.684160 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/3969f21f-ab36-49b4-9a9c-02cf19e65ad0-metrics-certs\") pod \"network-metrics-daemon-vws75\" (UID: \"3969f21f-ab36-49b4-9a9c-02cf19e65ad0\") " pod="openshift-multus/network-metrics-daemon-vws75" Jan 27 07:53:17 crc kubenswrapper[4787]: E0127 07:53:17.684485 4787 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Jan 27 07:53:17 crc kubenswrapper[4787]: E0127 07:53:17.684754 4787 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/3969f21f-ab36-49b4-9a9c-02cf19e65ad0-metrics-certs podName:3969f21f-ab36-49b4-9a9c-02cf19e65ad0 nodeName:}" failed. No retries permitted until 2026-01-27 07:54:21.68473377 +0000 UTC m=+167.337089262 (durationBeforeRetry 1m4s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/3969f21f-ab36-49b4-9a9c-02cf19e65ad0-metrics-certs") pod "network-metrics-daemon-vws75" (UID: "3969f21f-ab36-49b4-9a9c-02cf19e65ad0") : object "openshift-multus"/"metrics-daemon-secret" not registered Jan 27 07:53:17 crc kubenswrapper[4787]: I0127 07:53:17.743823 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:53:17 crc kubenswrapper[4787]: I0127 07:53:17.743889 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:53:17 crc kubenswrapper[4787]: I0127 07:53:17.743906 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:53:17 crc kubenswrapper[4787]: I0127 07:53:17.743937 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:53:17 crc kubenswrapper[4787]: I0127 07:53:17.743956 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:53:17Z","lastTransitionTime":"2026-01-27T07:53:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:53:17 crc kubenswrapper[4787]: I0127 07:53:17.847898 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:53:17 crc kubenswrapper[4787]: I0127 07:53:17.847976 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:53:17 crc kubenswrapper[4787]: I0127 07:53:17.847993 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:53:17 crc kubenswrapper[4787]: I0127 07:53:17.848022 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:53:17 crc kubenswrapper[4787]: I0127 07:53:17.848039 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:53:17Z","lastTransitionTime":"2026-01-27T07:53:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:53:17 crc kubenswrapper[4787]: I0127 07:53:17.950833 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:53:17 crc kubenswrapper[4787]: I0127 07:53:17.950896 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:53:17 crc kubenswrapper[4787]: I0127 07:53:17.950916 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:53:17 crc kubenswrapper[4787]: I0127 07:53:17.950942 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:53:17 crc kubenswrapper[4787]: I0127 07:53:17.950962 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:53:17Z","lastTransitionTime":"2026-01-27T07:53:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:53:18 crc kubenswrapper[4787]: I0127 07:53:18.054492 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:53:18 crc kubenswrapper[4787]: I0127 07:53:18.054562 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:53:18 crc kubenswrapper[4787]: I0127 07:53:18.054581 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:53:18 crc kubenswrapper[4787]: I0127 07:53:18.054601 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:53:18 crc kubenswrapper[4787]: I0127 07:53:18.054615 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:53:18Z","lastTransitionTime":"2026-01-27T07:53:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:53:18 crc kubenswrapper[4787]: I0127 07:53:18.096333 4787 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-30 07:29:32.639292684 +0000 UTC Jan 27 07:53:18 crc kubenswrapper[4787]: I0127 07:53:18.158060 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:53:18 crc kubenswrapper[4787]: I0127 07:53:18.158125 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:53:18 crc kubenswrapper[4787]: I0127 07:53:18.158144 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:53:18 crc kubenswrapper[4787]: I0127 07:53:18.158169 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:53:18 crc kubenswrapper[4787]: I0127 07:53:18.158188 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:53:18Z","lastTransitionTime":"2026-01-27T07:53:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:53:18 crc kubenswrapper[4787]: I0127 07:53:18.261061 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:53:18 crc kubenswrapper[4787]: I0127 07:53:18.261118 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:53:18 crc kubenswrapper[4787]: I0127 07:53:18.261130 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:53:18 crc kubenswrapper[4787]: I0127 07:53:18.261148 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:53:18 crc kubenswrapper[4787]: I0127 07:53:18.261161 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:53:18Z","lastTransitionTime":"2026-01-27T07:53:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:53:18 crc kubenswrapper[4787]: I0127 07:53:18.365186 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:53:18 crc kubenswrapper[4787]: I0127 07:53:18.365265 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:53:18 crc kubenswrapper[4787]: I0127 07:53:18.365289 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:53:18 crc kubenswrapper[4787]: I0127 07:53:18.365320 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:53:18 crc kubenswrapper[4787]: I0127 07:53:18.365339 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:53:18Z","lastTransitionTime":"2026-01-27T07:53:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:53:18 crc kubenswrapper[4787]: I0127 07:53:18.468927 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:53:18 crc kubenswrapper[4787]: I0127 07:53:18.469496 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:53:18 crc kubenswrapper[4787]: I0127 07:53:18.469749 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:53:18 crc kubenswrapper[4787]: I0127 07:53:18.469940 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:53:18 crc kubenswrapper[4787]: I0127 07:53:18.470084 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:53:18Z","lastTransitionTime":"2026-01-27T07:53:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:53:18 crc kubenswrapper[4787]: I0127 07:53:18.573941 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:53:18 crc kubenswrapper[4787]: I0127 07:53:18.574091 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:53:18 crc kubenswrapper[4787]: I0127 07:53:18.574109 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:53:18 crc kubenswrapper[4787]: I0127 07:53:18.574138 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:53:18 crc kubenswrapper[4787]: I0127 07:53:18.574158 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:53:18Z","lastTransitionTime":"2026-01-27T07:53:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:53:18 crc kubenswrapper[4787]: I0127 07:53:18.678093 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:53:18 crc kubenswrapper[4787]: I0127 07:53:18.678175 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:53:18 crc kubenswrapper[4787]: I0127 07:53:18.678194 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:53:18 crc kubenswrapper[4787]: I0127 07:53:18.678224 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:53:18 crc kubenswrapper[4787]: I0127 07:53:18.678243 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:53:18Z","lastTransitionTime":"2026-01-27T07:53:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:53:18 crc kubenswrapper[4787]: I0127 07:53:18.781878 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:53:18 crc kubenswrapper[4787]: I0127 07:53:18.781942 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:53:18 crc kubenswrapper[4787]: I0127 07:53:18.781957 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:53:18 crc kubenswrapper[4787]: I0127 07:53:18.781981 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:53:18 crc kubenswrapper[4787]: I0127 07:53:18.781997 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:53:18Z","lastTransitionTime":"2026-01-27T07:53:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:53:18 crc kubenswrapper[4787]: I0127 07:53:18.885991 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:53:18 crc kubenswrapper[4787]: I0127 07:53:18.886366 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:53:18 crc kubenswrapper[4787]: I0127 07:53:18.886490 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:53:18 crc kubenswrapper[4787]: I0127 07:53:18.886664 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:53:18 crc kubenswrapper[4787]: I0127 07:53:18.886774 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:53:18Z","lastTransitionTime":"2026-01-27T07:53:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:53:18 crc kubenswrapper[4787]: I0127 07:53:18.989901 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:53:18 crc kubenswrapper[4787]: I0127 07:53:18.989955 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:53:18 crc kubenswrapper[4787]: I0127 07:53:18.989968 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:53:18 crc kubenswrapper[4787]: I0127 07:53:18.989994 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:53:18 crc kubenswrapper[4787]: I0127 07:53:18.990008 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:53:18Z","lastTransitionTime":"2026-01-27T07:53:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:53:19 crc kubenswrapper[4787]: I0127 07:53:19.076668 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 27 07:53:19 crc kubenswrapper[4787]: I0127 07:53:19.076668 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 27 07:53:19 crc kubenswrapper[4787]: I0127 07:53:19.076707 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-vws75" Jan 27 07:53:19 crc kubenswrapper[4787]: I0127 07:53:19.076738 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 27 07:53:19 crc kubenswrapper[4787]: E0127 07:53:19.077940 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 27 07:53:19 crc kubenswrapper[4787]: E0127 07:53:19.078045 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 27 07:53:19 crc kubenswrapper[4787]: E0127 07:53:19.078140 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-vws75" podUID="3969f21f-ab36-49b4-9a9c-02cf19e65ad0" Jan 27 07:53:19 crc kubenswrapper[4787]: E0127 07:53:19.078255 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 27 07:53:19 crc kubenswrapper[4787]: I0127 07:53:19.091890 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:53:19 crc kubenswrapper[4787]: I0127 07:53:19.092158 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:53:19 crc kubenswrapper[4787]: I0127 07:53:19.092235 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:53:19 crc kubenswrapper[4787]: I0127 07:53:19.092319 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:53:19 crc kubenswrapper[4787]: I0127 07:53:19.092397 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:53:19Z","lastTransitionTime":"2026-01-27T07:53:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:53:19 crc kubenswrapper[4787]: I0127 07:53:19.097153 4787 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-29 09:03:49.025622369 +0000 UTC Jan 27 07:53:19 crc kubenswrapper[4787]: I0127 07:53:19.195029 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:53:19 crc kubenswrapper[4787]: I0127 07:53:19.195351 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:53:19 crc kubenswrapper[4787]: I0127 07:53:19.195421 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:53:19 crc kubenswrapper[4787]: I0127 07:53:19.195486 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:53:19 crc kubenswrapper[4787]: I0127 07:53:19.195593 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:53:19Z","lastTransitionTime":"2026-01-27T07:53:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:53:19 crc kubenswrapper[4787]: I0127 07:53:19.298614 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:53:19 crc kubenswrapper[4787]: I0127 07:53:19.298675 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:53:19 crc kubenswrapper[4787]: I0127 07:53:19.298692 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:53:19 crc kubenswrapper[4787]: I0127 07:53:19.298718 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:53:19 crc kubenswrapper[4787]: I0127 07:53:19.298736 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:53:19Z","lastTransitionTime":"2026-01-27T07:53:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:53:19 crc kubenswrapper[4787]: I0127 07:53:19.402382 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:53:19 crc kubenswrapper[4787]: I0127 07:53:19.402444 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:53:19 crc kubenswrapper[4787]: I0127 07:53:19.402458 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:53:19 crc kubenswrapper[4787]: I0127 07:53:19.402481 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:53:19 crc kubenswrapper[4787]: I0127 07:53:19.402494 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:53:19Z","lastTransitionTime":"2026-01-27T07:53:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:53:19 crc kubenswrapper[4787]: I0127 07:53:19.506354 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:53:19 crc kubenswrapper[4787]: I0127 07:53:19.506434 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:53:19 crc kubenswrapper[4787]: I0127 07:53:19.506453 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:53:19 crc kubenswrapper[4787]: I0127 07:53:19.506481 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:53:19 crc kubenswrapper[4787]: I0127 07:53:19.506501 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:53:19Z","lastTransitionTime":"2026-01-27T07:53:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:53:19 crc kubenswrapper[4787]: I0127 07:53:19.610456 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:53:19 crc kubenswrapper[4787]: I0127 07:53:19.610522 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:53:19 crc kubenswrapper[4787]: I0127 07:53:19.610539 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:53:19 crc kubenswrapper[4787]: I0127 07:53:19.610590 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:53:19 crc kubenswrapper[4787]: I0127 07:53:19.610619 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:53:19Z","lastTransitionTime":"2026-01-27T07:53:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:53:19 crc kubenswrapper[4787]: I0127 07:53:19.713966 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:53:19 crc kubenswrapper[4787]: I0127 07:53:19.714021 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:53:19 crc kubenswrapper[4787]: I0127 07:53:19.714031 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:53:19 crc kubenswrapper[4787]: I0127 07:53:19.714047 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:53:19 crc kubenswrapper[4787]: I0127 07:53:19.714059 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:53:19Z","lastTransitionTime":"2026-01-27T07:53:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:53:19 crc kubenswrapper[4787]: I0127 07:53:19.816952 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:53:19 crc kubenswrapper[4787]: I0127 07:53:19.817032 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:53:19 crc kubenswrapper[4787]: I0127 07:53:19.817051 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:53:19 crc kubenswrapper[4787]: I0127 07:53:19.817078 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:53:19 crc kubenswrapper[4787]: I0127 07:53:19.817099 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:53:19Z","lastTransitionTime":"2026-01-27T07:53:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:53:19 crc kubenswrapper[4787]: I0127 07:53:19.919590 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:53:19 crc kubenswrapper[4787]: I0127 07:53:19.919645 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:53:19 crc kubenswrapper[4787]: I0127 07:53:19.919656 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:53:19 crc kubenswrapper[4787]: I0127 07:53:19.919680 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:53:19 crc kubenswrapper[4787]: I0127 07:53:19.919694 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:53:19Z","lastTransitionTime":"2026-01-27T07:53:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:53:20 crc kubenswrapper[4787]: I0127 07:53:20.023102 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:53:20 crc kubenswrapper[4787]: I0127 07:53:20.023164 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:53:20 crc kubenswrapper[4787]: I0127 07:53:20.023178 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:53:20 crc kubenswrapper[4787]: I0127 07:53:20.023200 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:53:20 crc kubenswrapper[4787]: I0127 07:53:20.023214 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:53:20Z","lastTransitionTime":"2026-01-27T07:53:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:53:20 crc kubenswrapper[4787]: I0127 07:53:20.097972 4787 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-06 08:31:54.511500745 +0000 UTC Jan 27 07:53:20 crc kubenswrapper[4787]: I0127 07:53:20.127093 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:53:20 crc kubenswrapper[4787]: I0127 07:53:20.127169 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:53:20 crc kubenswrapper[4787]: I0127 07:53:20.127185 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:53:20 crc kubenswrapper[4787]: I0127 07:53:20.127210 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:53:20 crc kubenswrapper[4787]: I0127 07:53:20.127231 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:53:20Z","lastTransitionTime":"2026-01-27T07:53:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:53:20 crc kubenswrapper[4787]: I0127 07:53:20.230626 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:53:20 crc kubenswrapper[4787]: I0127 07:53:20.230694 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:53:20 crc kubenswrapper[4787]: I0127 07:53:20.230713 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:53:20 crc kubenswrapper[4787]: I0127 07:53:20.230753 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:53:20 crc kubenswrapper[4787]: I0127 07:53:20.230775 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:53:20Z","lastTransitionTime":"2026-01-27T07:53:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:53:20 crc kubenswrapper[4787]: I0127 07:53:20.334274 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:53:20 crc kubenswrapper[4787]: I0127 07:53:20.334370 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:53:20 crc kubenswrapper[4787]: I0127 07:53:20.334433 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:53:20 crc kubenswrapper[4787]: I0127 07:53:20.334472 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:53:20 crc kubenswrapper[4787]: I0127 07:53:20.334493 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:53:20Z","lastTransitionTime":"2026-01-27T07:53:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:53:20 crc kubenswrapper[4787]: I0127 07:53:20.437421 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:53:20 crc kubenswrapper[4787]: I0127 07:53:20.437488 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:53:20 crc kubenswrapper[4787]: I0127 07:53:20.437509 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:53:20 crc kubenswrapper[4787]: I0127 07:53:20.437535 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:53:20 crc kubenswrapper[4787]: I0127 07:53:20.437583 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:53:20Z","lastTransitionTime":"2026-01-27T07:53:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:53:20 crc kubenswrapper[4787]: I0127 07:53:20.540244 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:53:20 crc kubenswrapper[4787]: I0127 07:53:20.540283 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:53:20 crc kubenswrapper[4787]: I0127 07:53:20.540292 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:53:20 crc kubenswrapper[4787]: I0127 07:53:20.540307 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:53:20 crc kubenswrapper[4787]: I0127 07:53:20.540317 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:53:20Z","lastTransitionTime":"2026-01-27T07:53:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:53:20 crc kubenswrapper[4787]: I0127 07:53:20.643812 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:53:20 crc kubenswrapper[4787]: I0127 07:53:20.643940 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:53:20 crc kubenswrapper[4787]: I0127 07:53:20.643979 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:53:20 crc kubenswrapper[4787]: I0127 07:53:20.644014 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:53:20 crc kubenswrapper[4787]: I0127 07:53:20.644034 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:53:20Z","lastTransitionTime":"2026-01-27T07:53:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:53:20 crc kubenswrapper[4787]: I0127 07:53:20.747675 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:53:20 crc kubenswrapper[4787]: I0127 07:53:20.747752 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:53:20 crc kubenswrapper[4787]: I0127 07:53:20.747777 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:53:20 crc kubenswrapper[4787]: I0127 07:53:20.747809 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:53:20 crc kubenswrapper[4787]: I0127 07:53:20.747829 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:53:20Z","lastTransitionTime":"2026-01-27T07:53:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:53:20 crc kubenswrapper[4787]: I0127 07:53:20.850885 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:53:20 crc kubenswrapper[4787]: I0127 07:53:20.850942 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:53:20 crc kubenswrapper[4787]: I0127 07:53:20.850956 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:53:20 crc kubenswrapper[4787]: I0127 07:53:20.850976 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:53:20 crc kubenswrapper[4787]: I0127 07:53:20.850991 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:53:20Z","lastTransitionTime":"2026-01-27T07:53:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:53:20 crc kubenswrapper[4787]: I0127 07:53:20.953103 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:53:20 crc kubenswrapper[4787]: I0127 07:53:20.953139 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:53:20 crc kubenswrapper[4787]: I0127 07:53:20.953150 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:53:20 crc kubenswrapper[4787]: I0127 07:53:20.953163 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:53:20 crc kubenswrapper[4787]: I0127 07:53:20.953173 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:53:20Z","lastTransitionTime":"2026-01-27T07:53:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:53:21 crc kubenswrapper[4787]: I0127 07:53:21.056208 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:53:21 crc kubenswrapper[4787]: I0127 07:53:21.056239 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:53:21 crc kubenswrapper[4787]: I0127 07:53:21.056247 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:53:21 crc kubenswrapper[4787]: I0127 07:53:21.056260 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:53:21 crc kubenswrapper[4787]: I0127 07:53:21.056269 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:53:21Z","lastTransitionTime":"2026-01-27T07:53:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:53:21 crc kubenswrapper[4787]: I0127 07:53:21.076911 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 27 07:53:21 crc kubenswrapper[4787]: I0127 07:53:21.077018 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 27 07:53:21 crc kubenswrapper[4787]: I0127 07:53:21.077048 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-vws75" Jan 27 07:53:21 crc kubenswrapper[4787]: E0127 07:53:21.077069 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
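Through this stretch the kubelet re-records the same five node events roughly every 100 ms and keeps the Ready condition False with reason KubeletNotReady, because no CNI configuration file exists yet in /etc/kubernetes/cni/net.d/. The same condition is what an API client sees on the Node object; the following minimal client-go sketch (not taken from this log) reads it, assuming a placeholder kubeconfig path and using the node name crc from the entries above.

```go
// Minimal sketch: read the Ready condition the kubelet is publishing above.
// The kubeconfig path is a placeholder; the node name "crc" comes from the log.
package main

import (
	"context"
	"fmt"
	"log"

	corev1 "k8s.io/api/core/v1"
	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
	"k8s.io/client-go/kubernetes"
	"k8s.io/client-go/tools/clientcmd"
)

func main() {
	cfg, err := clientcmd.BuildConfigFromFlags("", "/path/to/kubeconfig") // placeholder
	if err != nil {
		log.Fatal(err)
	}
	cs, err := kubernetes.NewForConfig(cfg)
	if err != nil {
		log.Fatal(err)
	}
	node, err := cs.CoreV1().Nodes().Get(context.TODO(), "crc", metav1.GetOptions{})
	if err != nil {
		log.Fatal(err)
	}
	for _, c := range node.Status.Conditions {
		if c.Type == corev1.NodeReady {
			// While the entries above are being emitted, this prints
			// Status=False Reason=KubeletNotReady with the CNI message.
			fmt.Printf("Ready=%s reason=%s message=%s\n", c.Status, c.Reason, c.Message)
		}
	}
}
```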
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 27 07:53:21 crc kubenswrapper[4787]: I0127 07:53:21.077115 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 27 07:53:21 crc kubenswrapper[4787]: E0127 07:53:21.077150 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 27 07:53:21 crc kubenswrapper[4787]: E0127 07:53:21.077300 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 27 07:53:21 crc kubenswrapper[4787]: E0127 07:53:21.077341 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-vws75" podUID="3969f21f-ab36-49b4-9a9c-02cf19e65ad0" Jan 27 07:53:21 crc kubenswrapper[4787]: I0127 07:53:21.099127 4787 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-08 04:40:47.543571915 +0000 UTC Jan 27 07:53:21 crc kubenswrapper[4787]: I0127 07:53:21.158519 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:53:21 crc kubenswrapper[4787]: I0127 07:53:21.158583 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:53:21 crc kubenswrapper[4787]: I0127 07:53:21.158594 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:53:21 crc kubenswrapper[4787]: I0127 07:53:21.158609 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:53:21 crc kubenswrapper[4787]: I0127 07:53:21.158621 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:53:21Z","lastTransitionTime":"2026-01-27T07:53:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:53:21 crc kubenswrapper[4787]: I0127 07:53:21.262118 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:53:21 crc kubenswrapper[4787]: I0127 07:53:21.262183 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:53:21 crc kubenswrapper[4787]: I0127 07:53:21.262201 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:53:21 crc kubenswrapper[4787]: I0127 07:53:21.262301 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:53:21 crc kubenswrapper[4787]: I0127 07:53:21.262333 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:53:21Z","lastTransitionTime":"2026-01-27T07:53:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:53:21 crc kubenswrapper[4787]: I0127 07:53:21.366276 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:53:21 crc kubenswrapper[4787]: I0127 07:53:21.366367 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:53:21 crc kubenswrapper[4787]: I0127 07:53:21.366385 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:53:21 crc kubenswrapper[4787]: I0127 07:53:21.366409 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:53:21 crc kubenswrapper[4787]: I0127 07:53:21.366426 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:53:21Z","lastTransitionTime":"2026-01-27T07:53:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:53:21 crc kubenswrapper[4787]: I0127 07:53:21.469252 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:53:21 crc kubenswrapper[4787]: I0127 07:53:21.469669 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:53:21 crc kubenswrapper[4787]: I0127 07:53:21.469809 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:53:21 crc kubenswrapper[4787]: I0127 07:53:21.470046 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:53:21 crc kubenswrapper[4787]: I0127 07:53:21.470202 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:53:21Z","lastTransitionTime":"2026-01-27T07:53:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:53:21 crc kubenswrapper[4787]: I0127 07:53:21.573703 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:53:21 crc kubenswrapper[4787]: I0127 07:53:21.574264 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:53:21 crc kubenswrapper[4787]: I0127 07:53:21.574468 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:53:21 crc kubenswrapper[4787]: I0127 07:53:21.574723 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:53:21 crc kubenswrapper[4787]: I0127 07:53:21.574910 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:53:21Z","lastTransitionTime":"2026-01-27T07:53:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:53:21 crc kubenswrapper[4787]: I0127 07:53:21.679029 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:53:21 crc kubenswrapper[4787]: I0127 07:53:21.679592 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:53:21 crc kubenswrapper[4787]: I0127 07:53:21.679860 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:53:21 crc kubenswrapper[4787]: I0127 07:53:21.680149 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:53:21 crc kubenswrapper[4787]: I0127 07:53:21.680345 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:53:21Z","lastTransitionTime":"2026-01-27T07:53:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:53:21 crc kubenswrapper[4787]: I0127 07:53:21.783845 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:53:21 crc kubenswrapper[4787]: I0127 07:53:21.783907 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:53:21 crc kubenswrapper[4787]: I0127 07:53:21.783923 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:53:21 crc kubenswrapper[4787]: I0127 07:53:21.783946 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:53:21 crc kubenswrapper[4787]: I0127 07:53:21.783962 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:53:21Z","lastTransitionTime":"2026-01-27T07:53:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:53:21 crc kubenswrapper[4787]: I0127 07:53:21.887935 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:53:21 crc kubenswrapper[4787]: I0127 07:53:21.887988 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:53:21 crc kubenswrapper[4787]: I0127 07:53:21.888000 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:53:21 crc kubenswrapper[4787]: I0127 07:53:21.888024 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:53:21 crc kubenswrapper[4787]: I0127 07:53:21.888037 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:53:21Z","lastTransitionTime":"2026-01-27T07:53:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:53:21 crc kubenswrapper[4787]: I0127 07:53:21.992162 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:53:21 crc kubenswrapper[4787]: I0127 07:53:21.992217 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:53:21 crc kubenswrapper[4787]: I0127 07:53:21.992228 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:53:21 crc kubenswrapper[4787]: I0127 07:53:21.992248 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:53:21 crc kubenswrapper[4787]: I0127 07:53:21.992261 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:53:21Z","lastTransitionTime":"2026-01-27T07:53:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:53:22 crc kubenswrapper[4787]: I0127 07:53:22.095672 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:53:22 crc kubenswrapper[4787]: I0127 07:53:22.095738 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:53:22 crc kubenswrapper[4787]: I0127 07:53:22.095753 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:53:22 crc kubenswrapper[4787]: I0127 07:53:22.095777 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:53:22 crc kubenswrapper[4787]: I0127 07:53:22.095791 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:53:22Z","lastTransitionTime":"2026-01-27T07:53:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:53:22 crc kubenswrapper[4787]: I0127 07:53:22.100893 4787 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-06 20:12:16.778250298 +0000 UTC Jan 27 07:53:22 crc kubenswrapper[4787]: I0127 07:53:22.199083 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:53:22 crc kubenswrapper[4787]: I0127 07:53:22.199118 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:53:22 crc kubenswrapper[4787]: I0127 07:53:22.199127 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:53:22 crc kubenswrapper[4787]: I0127 07:53:22.199142 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:53:22 crc kubenswrapper[4787]: I0127 07:53:22.199153 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:53:22Z","lastTransitionTime":"2026-01-27T07:53:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:53:22 crc kubenswrapper[4787]: I0127 07:53:22.302666 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:53:22 crc kubenswrapper[4787]: I0127 07:53:22.302723 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:53:22 crc kubenswrapper[4787]: I0127 07:53:22.302736 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:53:22 crc kubenswrapper[4787]: I0127 07:53:22.302761 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:53:22 crc kubenswrapper[4787]: I0127 07:53:22.302782 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:53:22Z","lastTransitionTime":"2026-01-27T07:53:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:53:22 crc kubenswrapper[4787]: I0127 07:53:22.406236 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:53:22 crc kubenswrapper[4787]: I0127 07:53:22.406300 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:53:22 crc kubenswrapper[4787]: I0127 07:53:22.406312 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:53:22 crc kubenswrapper[4787]: I0127 07:53:22.406332 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:53:22 crc kubenswrapper[4787]: I0127 07:53:22.406347 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:53:22Z","lastTransitionTime":"2026-01-27T07:53:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:53:22 crc kubenswrapper[4787]: I0127 07:53:22.510419 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:53:22 crc kubenswrapper[4787]: I0127 07:53:22.510498 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:53:22 crc kubenswrapper[4787]: I0127 07:53:22.510517 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:53:22 crc kubenswrapper[4787]: I0127 07:53:22.510545 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:53:22 crc kubenswrapper[4787]: I0127 07:53:22.510604 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:53:22Z","lastTransitionTime":"2026-01-27T07:53:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:53:22 crc kubenswrapper[4787]: I0127 07:53:22.614597 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:53:22 crc kubenswrapper[4787]: I0127 07:53:22.615025 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:53:22 crc kubenswrapper[4787]: I0127 07:53:22.615136 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:53:22 crc kubenswrapper[4787]: I0127 07:53:22.615255 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:53:22 crc kubenswrapper[4787]: I0127 07:53:22.615347 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:53:22Z","lastTransitionTime":"2026-01-27T07:53:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:53:22 crc kubenswrapper[4787]: I0127 07:53:22.718116 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:53:22 crc kubenswrapper[4787]: I0127 07:53:22.718495 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:53:22 crc kubenswrapper[4787]: I0127 07:53:22.718639 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:53:22 crc kubenswrapper[4787]: I0127 07:53:22.718748 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:53:22 crc kubenswrapper[4787]: I0127 07:53:22.718844 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:53:22Z","lastTransitionTime":"2026-01-27T07:53:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:53:22 crc kubenswrapper[4787]: I0127 07:53:22.822503 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:53:22 crc kubenswrapper[4787]: I0127 07:53:22.822894 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:53:22 crc kubenswrapper[4787]: I0127 07:53:22.823076 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:53:22 crc kubenswrapper[4787]: I0127 07:53:22.823256 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:53:22 crc kubenswrapper[4787]: I0127 07:53:22.823448 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:53:22Z","lastTransitionTime":"2026-01-27T07:53:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:53:22 crc kubenswrapper[4787]: I0127 07:53:22.926979 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:53:22 crc kubenswrapper[4787]: I0127 07:53:22.927055 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:53:22 crc kubenswrapper[4787]: I0127 07:53:22.927071 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:53:22 crc kubenswrapper[4787]: I0127 07:53:22.927097 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:53:22 crc kubenswrapper[4787]: I0127 07:53:22.927117 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:53:22Z","lastTransitionTime":"2026-01-27T07:53:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:53:23 crc kubenswrapper[4787]: I0127 07:53:23.030374 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:53:23 crc kubenswrapper[4787]: I0127 07:53:23.030449 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:53:23 crc kubenswrapper[4787]: I0127 07:53:23.030475 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:53:23 crc kubenswrapper[4787]: I0127 07:53:23.030511 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:53:23 crc kubenswrapper[4787]: I0127 07:53:23.030539 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:53:23Z","lastTransitionTime":"2026-01-27T07:53:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:53:23 crc kubenswrapper[4787]: I0127 07:53:23.076097 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 27 07:53:23 crc kubenswrapper[4787]: I0127 07:53:23.076180 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 27 07:53:23 crc kubenswrapper[4787]: I0127 07:53:23.076268 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-vws75" Jan 27 07:53:23 crc kubenswrapper[4787]: E0127 07:53:23.076348 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 27 07:53:23 crc kubenswrapper[4787]: E0127 07:53:23.076598 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-vws75" podUID="3969f21f-ab36-49b4-9a9c-02cf19e65ad0" Jan 27 07:53:23 crc kubenswrapper[4787]: E0127 07:53:23.076752 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 27 07:53:23 crc kubenswrapper[4787]: I0127 07:53:23.076887 4787 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 27 07:53:23 crc kubenswrapper[4787]: E0127 07:53:23.077135 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 27 07:53:23 crc kubenswrapper[4787]: I0127 07:53:23.101604 4787 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-06 15:13:41.879854419 +0000 UTC Jan 27 07:53:23 crc kubenswrapper[4787]: I0127 07:53:23.133944 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:53:23 crc kubenswrapper[4787]: I0127 07:53:23.134001 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:53:23 crc kubenswrapper[4787]: I0127 07:53:23.134015 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:53:23 crc kubenswrapper[4787]: I0127 07:53:23.134035 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:53:23 crc kubenswrapper[4787]: I0127 07:53:23.134048 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:53:23Z","lastTransitionTime":"2026-01-27T07:53:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:53:23 crc kubenswrapper[4787]: I0127 07:53:23.236481 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:53:23 crc kubenswrapper[4787]: I0127 07:53:23.236543 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:53:23 crc kubenswrapper[4787]: I0127 07:53:23.236587 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:53:23 crc kubenswrapper[4787]: I0127 07:53:23.236609 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:53:23 crc kubenswrapper[4787]: I0127 07:53:23.236623 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:53:23Z","lastTransitionTime":"2026-01-27T07:53:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
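Interleaved with the node-status churn, certificate_manager keeps reporting that the kubelet-serving certificate expires on 2026-02-24 while every recomputed rotation deadline so far (2026-01-06, 2025-11-08, 2025-11-06, 2025-12-06) already lies in the past; rotation therefore begins shortly afterwards ("Rotating certificates" further down, followed by a CertificateSigningRequest watch). The sketch below is only a reconstruction of the commonly documented jitter scheme, which places the deadline at roughly 70-90% of the certificate lifetime; the issuance time used here is an assumption, not something this log states.

```go
// Reconstruction of the documented deadline jitter (roughly 70-90% of the
// certificate lifetime), not the kubelet's actual source. The expiration is
// taken from the log; the issuance time is an assumed one-year-earlier value.
package main

import (
	"fmt"
	"math/rand"
	"time"
)

// rotationDeadline picks a point 70-90% of the way through the cert's validity.
func rotationDeadline(notBefore, notAfter time.Time) time.Time {
	lifetime := notAfter.Sub(notBefore)
	fraction := 0.7 + 0.2*rand.Float64()
	return notBefore.Add(time.Duration(float64(lifetime) * fraction))
}

func main() {
	notAfter, _ := time.Parse(time.RFC3339, "2026-02-24T05:53:03Z") // expiration printed above
	notBefore := notAfter.AddDate(-1, 0, 0)                         // assumed issuance time
	deadline := rotationDeadline(notBefore, notAfter)
	fmt.Println("jittered rotation deadline:", deadline)
	fmt.Println("before the log's current time?",
		deadline.Before(time.Date(2026, 1, 27, 7, 53, 0, 0, time.UTC)))
}
```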
Has your network provider started?"} Jan 27 07:53:23 crc kubenswrapper[4787]: I0127 07:53:23.339603 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:53:23 crc kubenswrapper[4787]: I0127 07:53:23.339650 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:53:23 crc kubenswrapper[4787]: I0127 07:53:23.339662 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:53:23 crc kubenswrapper[4787]: I0127 07:53:23.339681 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:53:23 crc kubenswrapper[4787]: I0127 07:53:23.339693 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:53:23Z","lastTransitionTime":"2026-01-27T07:53:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:53:23 crc kubenswrapper[4787]: I0127 07:53:23.443355 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:53:23 crc kubenswrapper[4787]: I0127 07:53:23.443421 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:53:23 crc kubenswrapper[4787]: I0127 07:53:23.443440 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:53:23 crc kubenswrapper[4787]: I0127 07:53:23.443480 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:53:23 crc kubenswrapper[4787]: I0127 07:53:23.443497 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:53:23Z","lastTransitionTime":"2026-01-27T07:53:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 07:53:23 crc kubenswrapper[4787]: I0127 07:53:23.515411 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 07:53:23 crc kubenswrapper[4787]: I0127 07:53:23.515475 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 07:53:23 crc kubenswrapper[4787]: I0127 07:53:23.515493 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 07:53:23 crc kubenswrapper[4787]: I0127 07:53:23.515522 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 07:53:23 crc kubenswrapper[4787]: I0127 07:53:23.515543 4787 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T07:53:23Z","lastTransitionTime":"2026-01-27T07:53:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 07:53:23 crc kubenswrapper[4787]: I0127 07:53:23.572868 4787 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-version/cluster-version-operator-5c965bbfc6-ksn54"] Jan 27 07:53:23 crc kubenswrapper[4787]: I0127 07:53:23.573239 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-ksn54" Jan 27 07:53:23 crc kubenswrapper[4787]: I0127 07:53:23.575617 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"default-dockercfg-gxtc4" Jan 27 07:53:23 crc kubenswrapper[4787]: I0127 07:53:23.576519 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"kube-root-ca.crt" Jan 27 07:53:23 crc kubenswrapper[4787]: I0127 07:53:23.576843 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"cluster-version-operator-serving-cert" Jan 27 07:53:23 crc kubenswrapper[4787]: I0127 07:53:23.579533 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"openshift-service-ca.crt" Jan 27 07:53:23 crc kubenswrapper[4787]: I0127 07:53:23.661076 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/a6bbe7ce-5c2a-4d7a-84c0-80b3ae93da6c-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-ksn54\" (UID: \"a6bbe7ce-5c2a-4d7a-84c0-80b3ae93da6c\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-ksn54" Jan 27 07:53:23 crc kubenswrapper[4787]: I0127 07:53:23.661141 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a6bbe7ce-5c2a-4d7a-84c0-80b3ae93da6c-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-ksn54\" (UID: \"a6bbe7ce-5c2a-4d7a-84c0-80b3ae93da6c\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-ksn54" Jan 27 07:53:23 crc kubenswrapper[4787]: I0127 07:53:23.661184 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/a6bbe7ce-5c2a-4d7a-84c0-80b3ae93da6c-service-ca\") pod \"cluster-version-operator-5c965bbfc6-ksn54\" (UID: \"a6bbe7ce-5c2a-4d7a-84c0-80b3ae93da6c\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-ksn54" Jan 27 07:53:23 crc kubenswrapper[4787]: I0127 07:53:23.661405 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/a6bbe7ce-5c2a-4d7a-84c0-80b3ae93da6c-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-ksn54\" (UID: \"a6bbe7ce-5c2a-4d7a-84c0-80b3ae93da6c\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-ksn54" Jan 27 07:53:23 crc kubenswrapper[4787]: I0127 07:53:23.661749 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/a6bbe7ce-5c2a-4d7a-84c0-80b3ae93da6c-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-ksn54\" (UID: \"a6bbe7ce-5c2a-4d7a-84c0-80b3ae93da6c\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-ksn54" Jan 27 07:53:23 crc kubenswrapper[4787]: I0127 07:53:23.763048 4787 reconciler_common.go:218] 
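The entries above show the kubelet admitting openshift-cluster-version/cluster-version-operator-5c965bbfc6-ksn54 and starting the first half of volume reconciliation (VerifyControllerAttachedVolume for each declared volume); the MountVolume.SetUp entries that follow complete the second half. The UniqueName prefixes identify four volume source kinds: host-path, secret, configmap and projected. The sketch below renders that volume list as corev1 types; the Secret and ConfigMap names come from the reflector entries above, while the host paths and token projection settings are assumptions, not the actual cluster-version-operator manifest.

```go
// Illustrative only: the []corev1.Volume shape implied by the UniqueName kinds
// logged above. Secret/ConfigMap names appear in the reflector entries; the
// host paths and token settings are assumptions.
package main

import (
	"fmt"

	corev1 "k8s.io/api/core/v1"
)

func cvoVolumesSketch() []corev1.Volume {
	expiration := int64(3600) // assumed token lifetime
	return []corev1.Volume{
		{Name: "etc-ssl-certs", VolumeSource: corev1.VolumeSource{
			HostPath: &corev1.HostPathVolumeSource{Path: "/etc/ssl/certs"}}}, // assumed path
		{Name: "etc-cvo-updatepayloads", VolumeSource: corev1.VolumeSource{
			HostPath: &corev1.HostPathVolumeSource{Path: "/etc/cvo/updatepayloads"}}}, // assumed path
		{Name: "serving-cert", VolumeSource: corev1.VolumeSource{
			Secret: &corev1.SecretVolumeSource{SecretName: "cluster-version-operator-serving-cert"}}},
		{Name: "service-ca", VolumeSource: corev1.VolumeSource{
			ConfigMap: &corev1.ConfigMapVolumeSource{
				LocalObjectReference: corev1.LocalObjectReference{Name: "openshift-service-ca.crt"}}}},
		{Name: "kube-api-access", VolumeSource: corev1.VolumeSource{
			Projected: &corev1.ProjectedVolumeSource{Sources: []corev1.VolumeProjection{{
				ServiceAccountToken: &corev1.ServiceAccountTokenProjection{
					Path:              "token",
					ExpirationSeconds: &expiration,
				}}}}}},
	}
}

func main() {
	for _, v := range cvoVolumesSketch() {
		fmt.Println(v.Name)
	}
}
```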
"operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/a6bbe7ce-5c2a-4d7a-84c0-80b3ae93da6c-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-ksn54\" (UID: \"a6bbe7ce-5c2a-4d7a-84c0-80b3ae93da6c\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-ksn54" Jan 27 07:53:23 crc kubenswrapper[4787]: I0127 07:53:23.763214 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/a6bbe7ce-5c2a-4d7a-84c0-80b3ae93da6c-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-ksn54\" (UID: \"a6bbe7ce-5c2a-4d7a-84c0-80b3ae93da6c\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-ksn54" Jan 27 07:53:23 crc kubenswrapper[4787]: I0127 07:53:23.763284 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/a6bbe7ce-5c2a-4d7a-84c0-80b3ae93da6c-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-ksn54\" (UID: \"a6bbe7ce-5c2a-4d7a-84c0-80b3ae93da6c\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-ksn54" Jan 27 07:53:23 crc kubenswrapper[4787]: I0127 07:53:23.763337 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a6bbe7ce-5c2a-4d7a-84c0-80b3ae93da6c-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-ksn54\" (UID: \"a6bbe7ce-5c2a-4d7a-84c0-80b3ae93da6c\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-ksn54" Jan 27 07:53:23 crc kubenswrapper[4787]: I0127 07:53:23.763391 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/a6bbe7ce-5c2a-4d7a-84c0-80b3ae93da6c-service-ca\") pod \"cluster-version-operator-5c965bbfc6-ksn54\" (UID: \"a6bbe7ce-5c2a-4d7a-84c0-80b3ae93da6c\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-ksn54" Jan 27 07:53:23 crc kubenswrapper[4787]: I0127 07:53:23.763544 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/a6bbe7ce-5c2a-4d7a-84c0-80b3ae93da6c-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-ksn54\" (UID: \"a6bbe7ce-5c2a-4d7a-84c0-80b3ae93da6c\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-ksn54" Jan 27 07:53:23 crc kubenswrapper[4787]: I0127 07:53:23.763383 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/a6bbe7ce-5c2a-4d7a-84c0-80b3ae93da6c-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-ksn54\" (UID: \"a6bbe7ce-5c2a-4d7a-84c0-80b3ae93da6c\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-ksn54" Jan 27 07:53:23 crc kubenswrapper[4787]: I0127 07:53:23.767949 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/a6bbe7ce-5c2a-4d7a-84c0-80b3ae93da6c-service-ca\") pod \"cluster-version-operator-5c965bbfc6-ksn54\" (UID: \"a6bbe7ce-5c2a-4d7a-84c0-80b3ae93da6c\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-ksn54" Jan 27 07:53:23 crc kubenswrapper[4787]: I0127 07:53:23.776532 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/a6bbe7ce-5c2a-4d7a-84c0-80b3ae93da6c-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-ksn54\" (UID: \"a6bbe7ce-5c2a-4d7a-84c0-80b3ae93da6c\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-ksn54" Jan 27 07:53:23 crc kubenswrapper[4787]: I0127 07:53:23.783598 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/a6bbe7ce-5c2a-4d7a-84c0-80b3ae93da6c-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-ksn54\" (UID: \"a6bbe7ce-5c2a-4d7a-84c0-80b3ae93da6c\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-ksn54" Jan 27 07:53:23 crc kubenswrapper[4787]: I0127 07:53:23.894334 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-ksn54" Jan 27 07:53:24 crc kubenswrapper[4787]: I0127 07:53:24.102502 4787 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-12 11:26:03.376828637 +0000 UTC Jan 27 07:53:24 crc kubenswrapper[4787]: I0127 07:53:24.102612 4787 certificate_manager.go:356] kubernetes.io/kubelet-serving: Rotating certificates Jan 27 07:53:24 crc kubenswrapper[4787]: I0127 07:53:24.115630 4787 reflector.go:368] Caches populated for *v1.CertificateSigningRequest from k8s.io/client-go/tools/watch/informerwatcher.go:146 Jan 27 07:53:24 crc kubenswrapper[4787]: I0127 07:53:24.762355 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-ksn54" event={"ID":"a6bbe7ce-5c2a-4d7a-84c0-80b3ae93da6c","Type":"ContainerStarted","Data":"dde55c3e68a9cf5b5b88445a04afccf06d2c0be58fb3d4a1dc8e00a181071723"} Jan 27 07:53:24 crc kubenswrapper[4787]: I0127 07:53:24.762410 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-ksn54" event={"ID":"a6bbe7ce-5c2a-4d7a-84c0-80b3ae93da6c","Type":"ContainerStarted","Data":"56585961b600055370ac2c1c42d09bac6da7f2b1ae090d64881bbf99faaadff1"} Jan 27 07:53:24 crc kubenswrapper[4787]: I0127 07:53:24.781275 4787 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-ksn54" podStartSLOduration=85.781242313 podStartE2EDuration="1m25.781242313s" podCreationTimestamp="2026-01-27 07:51:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-27 07:53:24.780514468 +0000 UTC m=+110.432870040" watchObservedRunningTime="2026-01-27 07:53:24.781242313 +0000 UTC m=+110.433597865" Jan 27 07:53:25 crc kubenswrapper[4787]: I0127 07:53:25.076837 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 27 07:53:25 crc kubenswrapper[4787]: I0127 07:53:25.076893 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-vws75" Jan 27 07:53:25 crc kubenswrapper[4787]: I0127 07:53:25.077050 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 27 07:53:25 crc kubenswrapper[4787]: I0127 07:53:25.079842 4787 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 27 07:53:25 crc kubenswrapper[4787]: E0127 07:53:25.080041 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 27 07:53:25 crc kubenswrapper[4787]: E0127 07:53:25.080133 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-vws75" podUID="3969f21f-ab36-49b4-9a9c-02cf19e65ad0" Jan 27 07:53:25 crc kubenswrapper[4787]: E0127 07:53:25.080233 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 27 07:53:25 crc kubenswrapper[4787]: E0127 07:53:25.080388 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 27 07:53:26 crc kubenswrapper[4787]: I0127 07:53:26.077354 4787 scope.go:117] "RemoveContainer" containerID="6d3ec81938807879c988aabe13c7bf18add843b4295e6a4228405f02b47fd637" Jan 27 07:53:26 crc kubenswrapper[4787]: E0127 07:53:26.077632 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-7642m_openshift-ovn-kubernetes(fa44405c-042c-485a-ab6c-912dcd377751)\"" pod="openshift-ovn-kubernetes/ovnkube-node-7642m" podUID="fa44405c-042c-485a-ab6c-912dcd377751" Jan 27 07:53:27 crc kubenswrapper[4787]: I0127 07:53:27.076374 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-vws75" Jan 27 07:53:27 crc kubenswrapper[4787]: E0127 07:53:27.076633 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-vws75" podUID="3969f21f-ab36-49b4-9a9c-02cf19e65ad0" Jan 27 07:53:27 crc kubenswrapper[4787]: I0127 07:53:27.076938 4787 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 27 07:53:27 crc kubenswrapper[4787]: I0127 07:53:27.077005 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 27 07:53:27 crc kubenswrapper[4787]: I0127 07:53:27.076974 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 27 07:53:27 crc kubenswrapper[4787]: E0127 07:53:27.077138 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 27 07:53:27 crc kubenswrapper[4787]: E0127 07:53:27.077297 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 27 07:53:27 crc kubenswrapper[4787]: E0127 07:53:27.077523 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 27 07:53:29 crc kubenswrapper[4787]: I0127 07:53:29.075858 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 27 07:53:29 crc kubenswrapper[4787]: I0127 07:53:29.075908 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-vws75" Jan 27 07:53:29 crc kubenswrapper[4787]: I0127 07:53:29.075954 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 27 07:53:29 crc kubenswrapper[4787]: E0127 07:53:29.076032 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 27 07:53:29 crc kubenswrapper[4787]: E0127 07:53:29.076198 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-vws75" podUID="3969f21f-ab36-49b4-9a9c-02cf19e65ad0" Jan 27 07:53:29 crc kubenswrapper[4787]: E0127 07:53:29.076257 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 27 07:53:29 crc kubenswrapper[4787]: I0127 07:53:29.075883 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 27 07:53:29 crc kubenswrapper[4787]: E0127 07:53:29.077340 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 27 07:53:31 crc kubenswrapper[4787]: I0127 07:53:31.076374 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 27 07:53:31 crc kubenswrapper[4787]: E0127 07:53:31.076519 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 27 07:53:31 crc kubenswrapper[4787]: I0127 07:53:31.076534 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 27 07:53:31 crc kubenswrapper[4787]: E0127 07:53:31.076692 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 27 07:53:31 crc kubenswrapper[4787]: I0127 07:53:31.076374 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-vws75" Jan 27 07:53:31 crc kubenswrapper[4787]: E0127 07:53:31.076783 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-vws75" podUID="3969f21f-ab36-49b4-9a9c-02cf19e65ad0" Jan 27 07:53:31 crc kubenswrapper[4787]: I0127 07:53:31.077335 4787 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 27 07:53:31 crc kubenswrapper[4787]: E0127 07:53:31.077539 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 27 07:53:33 crc kubenswrapper[4787]: I0127 07:53:33.075769 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-vws75" Jan 27 07:53:33 crc kubenswrapper[4787]: I0127 07:53:33.075940 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 27 07:53:33 crc kubenswrapper[4787]: I0127 07:53:33.076096 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 27 07:53:33 crc kubenswrapper[4787]: E0127 07:53:33.075963 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-vws75" podUID="3969f21f-ab36-49b4-9a9c-02cf19e65ad0" Jan 27 07:53:33 crc kubenswrapper[4787]: E0127 07:53:33.076161 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 27 07:53:33 crc kubenswrapper[4787]: E0127 07:53:33.076295 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 27 07:53:33 crc kubenswrapper[4787]: I0127 07:53:33.076521 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 27 07:53:33 crc kubenswrapper[4787]: E0127 07:53:33.076644 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 27 07:53:33 crc kubenswrapper[4787]: I0127 07:53:33.794835 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-rqjpz_e6f78168-0b0d-464d-b1c7-00bb9a69c0d1/kube-multus/1.log" Jan 27 07:53:33 crc kubenswrapper[4787]: I0127 07:53:33.795945 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-rqjpz_e6f78168-0b0d-464d-b1c7-00bb9a69c0d1/kube-multus/0.log" Jan 27 07:53:33 crc kubenswrapper[4787]: I0127 07:53:33.796193 4787 generic.go:334] "Generic (PLEG): container finished" podID="e6f78168-0b0d-464d-b1c7-00bb9a69c0d1" containerID="4924d9f32dbc90c38c70d18db5a32bdabb76d288c34457331804829e451cf169" exitCode=1 Jan 27 07:53:33 crc kubenswrapper[4787]: I0127 07:53:33.796294 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-rqjpz" event={"ID":"e6f78168-0b0d-464d-b1c7-00bb9a69c0d1","Type":"ContainerDied","Data":"4924d9f32dbc90c38c70d18db5a32bdabb76d288c34457331804829e451cf169"} Jan 27 07:53:33 crc kubenswrapper[4787]: I0127 07:53:33.796633 4787 scope.go:117] "RemoveContainer" containerID="e73805a73ffd2c2028fa682daaf20e295ecd2f2d7e24bb9b897883f18c3c514e" Jan 27 07:53:33 crc kubenswrapper[4787]: I0127 07:53:33.797332 4787 scope.go:117] "RemoveContainer" containerID="4924d9f32dbc90c38c70d18db5a32bdabb76d288c34457331804829e451cf169" Jan 27 07:53:33 crc kubenswrapper[4787]: E0127 07:53:33.797697 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-multus pod=multus-rqjpz_openshift-multus(e6f78168-0b0d-464d-b1c7-00bb9a69c0d1)\"" pod="openshift-multus/multus-rqjpz" podUID="e6f78168-0b0d-464d-b1c7-00bb9a69c0d1" Jan 27 07:53:34 crc kubenswrapper[4787]: I0127 07:53:34.801259 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-rqjpz_e6f78168-0b0d-464d-b1c7-00bb9a69c0d1/kube-multus/1.log" Jan 27 07:53:35 crc kubenswrapper[4787]: E0127 07:53:35.032970 4787 kubelet_node_status.go:497] "Node not becoming ready in time after startup" Jan 27 07:53:35 crc kubenswrapper[4787]: I0127 07:53:35.076293 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 27 07:53:35 crc kubenswrapper[4787]: I0127 07:53:35.076367 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 27 07:53:35 crc kubenswrapper[4787]: I0127 07:53:35.076884 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 27 07:53:35 crc kubenswrapper[4787]: E0127 07:53:35.076878 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 27 07:53:35 crc kubenswrapper[4787]: I0127 07:53:35.076976 4787 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-vws75" Jan 27 07:53:35 crc kubenswrapper[4787]: E0127 07:53:35.077154 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 27 07:53:35 crc kubenswrapper[4787]: E0127 07:53:35.077405 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 27 07:53:35 crc kubenswrapper[4787]: E0127 07:53:35.077673 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-vws75" podUID="3969f21f-ab36-49b4-9a9c-02cf19e65ad0" Jan 27 07:53:35 crc kubenswrapper[4787]: E0127 07:53:35.201598 4787 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Jan 27 07:53:37 crc kubenswrapper[4787]: I0127 07:53:37.076823 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 27 07:53:37 crc kubenswrapper[4787]: E0127 07:53:37.076998 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 27 07:53:37 crc kubenswrapper[4787]: I0127 07:53:37.077109 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 27 07:53:37 crc kubenswrapper[4787]: I0127 07:53:37.077198 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-vws75" Jan 27 07:53:37 crc kubenswrapper[4787]: E0127 07:53:37.077341 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 27 07:53:37 crc kubenswrapper[4787]: E0127 07:53:37.077612 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-vws75" podUID="3969f21f-ab36-49b4-9a9c-02cf19e65ad0" Jan 27 07:53:37 crc kubenswrapper[4787]: I0127 07:53:37.077211 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 27 07:53:37 crc kubenswrapper[4787]: E0127 07:53:37.077892 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 27 07:53:37 crc kubenswrapper[4787]: I0127 07:53:37.080126 4787 scope.go:117] "RemoveContainer" containerID="6d3ec81938807879c988aabe13c7bf18add843b4295e6a4228405f02b47fd637" Jan 27 07:53:37 crc kubenswrapper[4787]: I0127 07:53:37.814468 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-7642m_fa44405c-042c-485a-ab6c-912dcd377751/ovnkube-controller/3.log" Jan 27 07:53:37 crc kubenswrapper[4787]: I0127 07:53:37.818159 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7642m" event={"ID":"fa44405c-042c-485a-ab6c-912dcd377751","Type":"ContainerStarted","Data":"be3770d2a2e5b599b3bac6445f4cdedb5531335a40c7e1a5727aa8b323e2394c"} Jan 27 07:53:37 crc kubenswrapper[4787]: I0127 07:53:37.818899 4787 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-7642m" Jan 27 07:53:38 crc kubenswrapper[4787]: I0127 07:53:38.152718 4787 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-7642m" podStartSLOduration=99.152693543 podStartE2EDuration="1m39.152693543s" podCreationTimestamp="2026-01-27 07:51:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-27 07:53:37.847894303 +0000 UTC m=+123.500249805" watchObservedRunningTime="2026-01-27 07:53:38.152693543 +0000 UTC m=+123.805049045" Jan 27 07:53:38 crc kubenswrapper[4787]: I0127 07:53:38.153274 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-vws75"] Jan 27 07:53:38 crc kubenswrapper[4787]: I0127 07:53:38.153400 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-vws75" Jan 27 07:53:38 crc kubenswrapper[4787]: E0127 07:53:38.153528 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-vws75" podUID="3969f21f-ab36-49b4-9a9c-02cf19e65ad0" Jan 27 07:53:39 crc kubenswrapper[4787]: I0127 07:53:39.075959 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 27 07:53:39 crc kubenswrapper[4787]: E0127 07:53:39.076633 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 27 07:53:39 crc kubenswrapper[4787]: I0127 07:53:39.076948 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 27 07:53:39 crc kubenswrapper[4787]: E0127 07:53:39.077064 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 27 07:53:39 crc kubenswrapper[4787]: I0127 07:53:39.077270 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 27 07:53:39 crc kubenswrapper[4787]: E0127 07:53:39.077369 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 27 07:53:40 crc kubenswrapper[4787]: I0127 07:53:40.076723 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-vws75" Jan 27 07:53:40 crc kubenswrapper[4787]: E0127 07:53:40.078325 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-vws75" podUID="3969f21f-ab36-49b4-9a9c-02cf19e65ad0" Jan 27 07:53:40 crc kubenswrapper[4787]: E0127 07:53:40.202624 4787 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Jan 27 07:53:41 crc kubenswrapper[4787]: I0127 07:53:41.076616 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 27 07:53:41 crc kubenswrapper[4787]: I0127 07:53:41.076738 4787 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 27 07:53:41 crc kubenswrapper[4787]: I0127 07:53:41.076977 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 27 07:53:41 crc kubenswrapper[4787]: E0127 07:53:41.077171 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 27 07:53:41 crc kubenswrapper[4787]: E0127 07:53:41.077310 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 27 07:53:41 crc kubenswrapper[4787]: E0127 07:53:41.077502 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 27 07:53:42 crc kubenswrapper[4787]: I0127 07:53:42.076061 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-vws75" Jan 27 07:53:42 crc kubenswrapper[4787]: E0127 07:53:42.076317 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-vws75" podUID="3969f21f-ab36-49b4-9a9c-02cf19e65ad0" Jan 27 07:53:43 crc kubenswrapper[4787]: I0127 07:53:43.076068 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 27 07:53:43 crc kubenswrapper[4787]: I0127 07:53:43.076172 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 27 07:53:43 crc kubenswrapper[4787]: E0127 07:53:43.076243 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 27 07:53:43 crc kubenswrapper[4787]: I0127 07:53:43.076087 4787 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 27 07:53:43 crc kubenswrapper[4787]: E0127 07:53:43.076459 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 27 07:53:43 crc kubenswrapper[4787]: E0127 07:53:43.076448 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 27 07:53:43 crc kubenswrapper[4787]: I0127 07:53:43.322118 4787 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-7642m" Jan 27 07:53:44 crc kubenswrapper[4787]: I0127 07:53:44.076483 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-vws75" Jan 27 07:53:44 crc kubenswrapper[4787]: E0127 07:53:44.076730 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-vws75" podUID="3969f21f-ab36-49b4-9a9c-02cf19e65ad0" Jan 27 07:53:45 crc kubenswrapper[4787]: I0127 07:53:45.075981 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 27 07:53:45 crc kubenswrapper[4787]: I0127 07:53:45.076072 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 27 07:53:45 crc kubenswrapper[4787]: I0127 07:53:45.077769 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 27 07:53:45 crc kubenswrapper[4787]: E0127 07:53:45.077758 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 27 07:53:45 crc kubenswrapper[4787]: E0127 07:53:45.077955 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 27 07:53:45 crc kubenswrapper[4787]: E0127 07:53:45.078145 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 27 07:53:45 crc kubenswrapper[4787]: E0127 07:53:45.203507 4787 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Jan 27 07:53:46 crc kubenswrapper[4787]: I0127 07:53:46.075753 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-vws75" Jan 27 07:53:46 crc kubenswrapper[4787]: E0127 07:53:46.075964 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-vws75" podUID="3969f21f-ab36-49b4-9a9c-02cf19e65ad0" Jan 27 07:53:46 crc kubenswrapper[4787]: I0127 07:53:46.077675 4787 scope.go:117] "RemoveContainer" containerID="4924d9f32dbc90c38c70d18db5a32bdabb76d288c34457331804829e451cf169" Jan 27 07:53:46 crc kubenswrapper[4787]: I0127 07:53:46.861137 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-rqjpz_e6f78168-0b0d-464d-b1c7-00bb9a69c0d1/kube-multus/1.log" Jan 27 07:53:46 crc kubenswrapper[4787]: I0127 07:53:46.861518 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-rqjpz" event={"ID":"e6f78168-0b0d-464d-b1c7-00bb9a69c0d1","Type":"ContainerStarted","Data":"30eb607bd3c5a74648f4c24cbbf8118159296bb63fb30a76b991d8fdb94cb16a"} Jan 27 07:53:47 crc kubenswrapper[4787]: I0127 07:53:47.075761 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 27 07:53:47 crc kubenswrapper[4787]: I0127 07:53:47.075773 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 27 07:53:47 crc kubenswrapper[4787]: I0127 07:53:47.076035 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 27 07:53:47 crc kubenswrapper[4787]: E0127 07:53:47.075902 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 27 07:53:47 crc kubenswrapper[4787]: E0127 07:53:47.076178 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 27 07:53:47 crc kubenswrapper[4787]: E0127 07:53:47.076222 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 27 07:53:48 crc kubenswrapper[4787]: I0127 07:53:48.075876 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-vws75" Jan 27 07:53:48 crc kubenswrapper[4787]: E0127 07:53:48.076132 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-vws75" podUID="3969f21f-ab36-49b4-9a9c-02cf19e65ad0" Jan 27 07:53:49 crc kubenswrapper[4787]: I0127 07:53:49.076603 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 27 07:53:49 crc kubenswrapper[4787]: I0127 07:53:49.076652 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 27 07:53:49 crc kubenswrapper[4787]: I0127 07:53:49.077084 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 27 07:53:49 crc kubenswrapper[4787]: E0127 07:53:49.077344 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 27 07:53:49 crc kubenswrapper[4787]: E0127 07:53:49.077800 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 27 07:53:49 crc kubenswrapper[4787]: E0127 07:53:49.077998 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 27 07:53:50 crc kubenswrapper[4787]: I0127 07:53:50.076211 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-vws75" Jan 27 07:53:50 crc kubenswrapper[4787]: E0127 07:53:50.076379 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-vws75" podUID="3969f21f-ab36-49b4-9a9c-02cf19e65ad0" Jan 27 07:53:51 crc kubenswrapper[4787]: I0127 07:53:51.076640 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 27 07:53:51 crc kubenswrapper[4787]: I0127 07:53:51.076640 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 27 07:53:51 crc kubenswrapper[4787]: I0127 07:53:51.076664 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 27 07:53:51 crc kubenswrapper[4787]: I0127 07:53:51.078952 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"kube-root-ca.crt" Jan 27 07:53:51 crc kubenswrapper[4787]: I0127 07:53:51.079287 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"openshift-service-ca.crt" Jan 27 07:53:51 crc kubenswrapper[4787]: I0127 07:53:51.079344 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-console"/"networking-console-plugin-cert" Jan 27 07:53:51 crc kubenswrapper[4787]: I0127 07:53:51.079383 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-console"/"networking-console-plugin" Jan 27 07:53:52 crc kubenswrapper[4787]: I0127 07:53:52.076024 4787 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-vws75" Jan 27 07:53:52 crc kubenswrapper[4787]: I0127 07:53:52.078807 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-sa-dockercfg-d427c" Jan 27 07:53:52 crc kubenswrapper[4787]: I0127 07:53:52.080200 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-secret" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.417607 4787 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeReady" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.473375 4787 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-p29lm"] Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.474531 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-p29lm" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.478956 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"trusted-ca-bundle" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.479126 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"oauth-apiserver-sa-dockercfg-6r2bq" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.481205 4787 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-zkx8b"] Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.482493 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-zkx8b" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.483002 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"openshift-service-ca.crt" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.483614 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"serving-cert" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.483941 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"kube-root-ca.crt" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.484543 4787 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-q44w4"] Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.485538 4787 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-q44w4" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.487322 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"openshift-apiserver-sa-dockercfg-djjff" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.487911 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"encryption-config-1" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.488831 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"kube-root-ca.crt" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.489186 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"etcd-client" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.489685 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"audit-1" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.490027 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"serving-cert" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.491462 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"etcd-client" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.491857 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"config" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.492201 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"etcd-serving-ca" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.492500 4787 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-mnrsb"] Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.492531 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"image-import-ca" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.492576 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"encryption-config-1" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.493161 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"openshift-service-ca.crt" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.493180 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-mnrsb" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.493953 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"openshift-service-ca.crt" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.494160 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"cluster-samples-operator-dockercfg-xpp9w" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.494352 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"samples-operator-tls" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.498429 4787 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-dlz8t"] Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.499497 4787 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-dlz8t" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.500334 4787 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-f9d7485db-qptnb"] Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.500922 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-qptnb" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.501951 4787 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-84k56"] Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.502543 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-84k56" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.502908 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"audit-1" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.503112 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"kube-root-ca.crt" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.503391 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"etcd-serving-ca" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.504002 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-service-ca.crt" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.504227 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-config" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.505465 4787 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/downloads-7954f5f757-xwx4w"] Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.506910 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"kube-root-ca.crt" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.507087 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-oauth-config" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.507167 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-dockercfg-xtcjv" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.507277 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.507372 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"console-config" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.507590 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.507628 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.507757 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-serving-cert" Jan 27 07:53:54 crc kubenswrapper[4787]: 
I0127 07:53:54.507848 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.508004 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-serving-cert" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.508105 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-dockercfg-f62pw" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.508166 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"oauth-serving-cert" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.508259 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"service-ca" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.508678 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.508726 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"kube-root-ca.crt" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.508817 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"openshift-service-ca.crt" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.508837 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"serving-cert" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.508923 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"authentication-operator-config" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.508994 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"service-ca-bundle" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.509042 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"authentication-operator-dockercfg-mz9bj" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.509401 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.509591 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"openshift-service-ca.crt" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.511287 4787 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-wdppr"] Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.511962 4787 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console-operator/console-operator-58897d9998-ctv89"] Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.512195 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/downloads-7954f5f757-xwx4w" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.512396 4787 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-ln7rp"] Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.512652 4787 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-wdppr" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.512788 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-ctv89" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.512905 4787 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-machine-approver/machine-approver-56656f9798-tfh8x"] Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.528082 4787 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-hn2j2"] Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.532237 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.533497 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-tfh8x" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.534464 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.550848 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.551332 4787 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-g44f6"] Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.551704 4787 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-qtkc8"] Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.552175 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-q44w4"] Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.552268 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-qtkc8" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.553219 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-hn2j2" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.553465 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-g44f6" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.553471 4787 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-ln7rp" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.554818 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.556683 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/ccb9403f-6f1d-4bd9-a18b-b90404cff1a0-audit-policies\") pod \"apiserver-7bbb656c7d-p29lm\" (UID: \"ccb9403f-6f1d-4bd9-a18b-b90404cff1a0\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-p29lm" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.556713 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/ccb9403f-6f1d-4bd9-a18b-b90404cff1a0-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-p29lm\" (UID: \"ccb9403f-6f1d-4bd9-a18b-b90404cff1a0\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-p29lm" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.556747 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/ccb9403f-6f1d-4bd9-a18b-b90404cff1a0-audit-dir\") pod \"apiserver-7bbb656c7d-p29lm\" (UID: \"ccb9403f-6f1d-4bd9-a18b-b90404cff1a0\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-p29lm" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.556805 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ccb9403f-6f1d-4bd9-a18b-b90404cff1a0-serving-cert\") pod \"apiserver-7bbb656c7d-p29lm\" (UID: \"ccb9403f-6f1d-4bd9-a18b-b90404cff1a0\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-p29lm" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.556841 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/ccb9403f-6f1d-4bd9-a18b-b90404cff1a0-etcd-client\") pod \"apiserver-7bbb656c7d-p29lm\" (UID: \"ccb9403f-6f1d-4bd9-a18b-b90404cff1a0\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-p29lm" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.556861 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/ccb9403f-6f1d-4bd9-a18b-b90404cff1a0-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-p29lm\" (UID: \"ccb9403f-6f1d-4bd9-a18b-b90404cff1a0\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-p29lm" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.556899 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b4wk4\" (UniqueName: \"kubernetes.io/projected/ccb9403f-6f1d-4bd9-a18b-b90404cff1a0-kube-api-access-b4wk4\") pod \"apiserver-7bbb656c7d-p29lm\" (UID: \"ccb9403f-6f1d-4bd9-a18b-b90404cff1a0\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-p29lm" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.556921 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/ccb9403f-6f1d-4bd9-a18b-b90404cff1a0-encryption-config\") pod \"apiserver-7bbb656c7d-p29lm\" (UID: 
\"ccb9403f-6f1d-4bd9-a18b-b90404cff1a0\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-p29lm" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.556697 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.557193 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.557280 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"trusted-ca-bundle" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.557290 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"audit" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.557348 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.557410 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.558887 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"serving-cert" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.559077 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"kube-root-ca.crt" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.561541 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-mnrsb"] Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.561713 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"kube-root-ca.crt" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.562034 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"default-dockercfg-chnjx" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.565223 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.566117 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"console-operator-dockercfg-4xjcr" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.571823 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"console-operator-config" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.572372 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-g44f6"] Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.573752 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"openshift-service-ca.crt" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.573947 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"trusted-ca-bundle" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.576110 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-tls" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.577058 4787 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-cluster-machine-approver"/"kube-rbac-proxy" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.578392 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"trusted-ca-bundle" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.578911 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.578945 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.579400 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"machine-approver-config" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.579464 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-dlz8t"] Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.580271 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-rbac-proxy" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.580289 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"trusted-ca" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.581124 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-ln7rp"] Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.582939 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-84k56"] Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.587219 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.587857 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"openshift-service-ca.crt" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.587989 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-root-ca.crt" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.588191 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"config-operator-serving-cert" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.588314 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"openshift-config-operator-dockercfg-7pc5z" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.588415 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-root-ca.crt" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.588510 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.588634 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"openshift-service-ca.crt" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.588748 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"openshift-service-ca.crt" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.589263 4787 kubelet.go:2428] 
"SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-p29lm"] Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.589649 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-sa-dockercfg-nl2j4" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.589840 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"kube-root-ca.crt" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.590043 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.592241 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-zkx8b"] Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.594171 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.596769 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-hn2j2"] Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.596907 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.597084 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"machine-api-operator-images" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.616997 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-tls" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.617395 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-xwx4w"] Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.622072 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.622839 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"cluster-image-registry-operator-dockercfg-m4qtx" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.626130 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-dockercfg-mfbb7" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.626927 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.627779 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-operator-tls" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.643175 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.646297 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.647658 4787 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-mnzjr"] Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.648344 4787 
kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-t6xzm"] Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.648853 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-t6xzm" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.649182 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-mnzjr" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.649326 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.650256 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"kube-root-ca.crt" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.650535 4787 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-d44bj"] Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.651490 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-d44bj" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.651851 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"trusted-ca" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.653445 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"openshift-service-ca.crt" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.653600 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"metrics-tls" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.653750 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"openshift-service-ca.crt" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.655241 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"dns-operator-dockercfg-9mqw5" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.658860 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-dockercfg-r9srn" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.661596 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/3ef9e285-88a8-499d-8fb8-e4c882336e68-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-qtkc8\" (UID: \"3ef9e285-88a8-499d-8fb8-e4c882336e68\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-qtkc8" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.661649 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/832859dc-03b3-4b2b-9a40-75372ebb38d9-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-mnrsb\" (UID: \"832859dc-03b3-4b2b-9a40-75372ebb38d9\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-mnrsb" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.661686 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/0259775f-1fef-486a-bc17-4638e38ed83f-client-ca\") pod 
\"route-controller-manager-6576b87f9c-dlz8t\" (UID: \"0259775f-1fef-486a-bc17-4638e38ed83f\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-dlz8t" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.661719 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/fcc64739-2fd2-4413-a19e-0bc14dd883d6-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-wdppr\" (UID: \"fcc64739-2fd2-4413-a19e-0bc14dd883d6\") " pod="openshift-authentication/oauth-openshift-558db77b4-wdppr" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.661746 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0259775f-1fef-486a-bc17-4638e38ed83f-config\") pod \"route-controller-manager-6576b87f9c-dlz8t\" (UID: \"0259775f-1fef-486a-bc17-4638e38ed83f\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-dlz8t" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.661769 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s5qpv\" (UniqueName: \"kubernetes.io/projected/a1672707-9776-4274-9bdd-4f1dabf83038-kube-api-access-s5qpv\") pod \"openshift-config-operator-7777fb866f-hn2j2\" (UID: \"a1672707-9776-4274-9bdd-4f1dabf83038\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-hn2j2" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.661803 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/0a3473c0-d321-4be8-860f-bd51210cb58f-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-ln7rp\" (UID: \"0a3473c0-d321-4be8-860f-bd51210cb58f\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-ln7rp" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.661845 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/ccb9403f-6f1d-4bd9-a18b-b90404cff1a0-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-p29lm\" (UID: \"ccb9403f-6f1d-4bd9-a18b-b90404cff1a0\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-p29lm" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.661867 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ba0745e4-7610-44e2-9a65-cd3875393d64-serving-cert\") pod \"apiserver-76f77b778f-zkx8b\" (UID: \"ba0745e4-7610-44e2-9a65-cd3875393d64\") " pod="openshift-apiserver/apiserver-76f77b778f-zkx8b" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.661889 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/ba0745e4-7610-44e2-9a65-cd3875393d64-audit\") pod \"apiserver-76f77b778f-zkx8b\" (UID: \"ba0745e4-7610-44e2-9a65-cd3875393d64\") " pod="openshift-apiserver/apiserver-76f77b778f-zkx8b" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.661911 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/fcc64739-2fd2-4413-a19e-0bc14dd883d6-v4-0-config-user-template-login\") pod 
\"oauth-openshift-558db77b4-wdppr\" (UID: \"fcc64739-2fd2-4413-a19e-0bc14dd883d6\") " pod="openshift-authentication/oauth-openshift-558db77b4-wdppr" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.662021 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/daf6a704-5857-419a-bfaa-e2af3a05d386-config\") pod \"authentication-operator-69f744f599-84k56\" (UID: \"daf6a704-5857-419a-bfaa-e2af3a05d386\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-84k56" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.662084 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/b070600e-8a6f-4bb9-a1c2-e763f55d90eb-service-ca\") pod \"console-f9d7485db-qptnb\" (UID: \"b070600e-8a6f-4bb9-a1c2-e763f55d90eb\") " pod="openshift-console/console-f9d7485db-qptnb" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.662185 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nfw62\" (UniqueName: \"kubernetes.io/projected/7c9507a4-925b-418e-b824-f338cd69a66e-kube-api-access-nfw62\") pod \"cluster-samples-operator-665b6dd947-q44w4\" (UID: \"7c9507a4-925b-418e-b824-f338cd69a66e\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-q44w4" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.662278 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/ccb9403f-6f1d-4bd9-a18b-b90404cff1a0-audit-dir\") pod \"apiserver-7bbb656c7d-p29lm\" (UID: \"ccb9403f-6f1d-4bd9-a18b-b90404cff1a0\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-p29lm" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.662320 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kxmsq\" (UniqueName: \"kubernetes.io/projected/ca4c081d-896c-4cc8-9656-59364376de35-kube-api-access-kxmsq\") pod \"controller-manager-879f6c89f-g44f6\" (UID: \"ca4c081d-896c-4cc8-9656-59364376de35\") " pod="openshift-controller-manager/controller-manager-879f6c89f-g44f6" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.662319 4787 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-zgw8m"] Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.662396 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/ba0745e4-7610-44e2-9a65-cd3875393d64-trusted-ca-bundle\") pod \"apiserver-76f77b778f-zkx8b\" (UID: \"ba0745e4-7610-44e2-9a65-cd3875393d64\") " pod="openshift-apiserver/apiserver-76f77b778f-zkx8b" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.662423 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/ba0745e4-7610-44e2-9a65-cd3875393d64-encryption-config\") pod \"apiserver-76f77b778f-zkx8b\" (UID: \"ba0745e4-7610-44e2-9a65-cd3875393d64\") " pod="openshift-apiserver/apiserver-76f77b778f-zkx8b" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.662451 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/fcc64739-2fd2-4413-a19e-0bc14dd883d6-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-wdppr\" (UID: \"fcc64739-2fd2-4413-a19e-0bc14dd883d6\") " pod="openshift-authentication/oauth-openshift-558db77b4-wdppr" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.662481 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b070600e-8a6f-4bb9-a1c2-e763f55d90eb-trusted-ca-bundle\") pod \"console-f9d7485db-qptnb\" (UID: \"b070600e-8a6f-4bb9-a1c2-e763f55d90eb\") " pod="openshift-console/console-f9d7485db-qptnb" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.662541 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/ca4c081d-896c-4cc8-9656-59364376de35-client-ca\") pod \"controller-manager-879f6c89f-g44f6\" (UID: \"ca4c081d-896c-4cc8-9656-59364376de35\") " pod="openshift-controller-manager/controller-manager-879f6c89f-g44f6" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.662596 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/ba0745e4-7610-44e2-9a65-cd3875393d64-node-pullsecrets\") pod \"apiserver-76f77b778f-zkx8b\" (UID: \"ba0745e4-7610-44e2-9a65-cd3875393d64\") " pod="openshift-apiserver/apiserver-76f77b778f-zkx8b" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.662621 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/fcc64739-2fd2-4413-a19e-0bc14dd883d6-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-wdppr\" (UID: \"fcc64739-2fd2-4413-a19e-0bc14dd883d6\") " pod="openshift-authentication/oauth-openshift-558db77b4-wdppr" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.662680 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-stktz\" (UniqueName: \"kubernetes.io/projected/0259775f-1fef-486a-bc17-4638e38ed83f-kube-api-access-stktz\") pod \"route-controller-manager-6576b87f9c-dlz8t\" (UID: \"0259775f-1fef-486a-bc17-4638e38ed83f\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-dlz8t" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.662834 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/ccb9403f-6f1d-4bd9-a18b-b90404cff1a0-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-p29lm\" (UID: \"ccb9403f-6f1d-4bd9-a18b-b90404cff1a0\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-p29lm" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.662906 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/ccb9403f-6f1d-4bd9-a18b-b90404cff1a0-audit-dir\") pod \"apiserver-7bbb656c7d-p29lm\" (UID: \"ccb9403f-6f1d-4bd9-a18b-b90404cff1a0\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-p29lm" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.662935 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/af87df13-9164-4e5e-99d4-5cbd9dfe80a5-serving-cert\") pod 
\"console-operator-58897d9998-ctv89\" (UID: \"af87df13-9164-4e5e-99d4-5cbd9dfe80a5\") " pod="openshift-console-operator/console-operator-58897d9998-ctv89" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.662961 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m7r6d\" (UniqueName: \"kubernetes.io/projected/ba0745e4-7610-44e2-9a65-cd3875393d64-kube-api-access-m7r6d\") pod \"apiserver-76f77b778f-zkx8b\" (UID: \"ba0745e4-7610-44e2-9a65-cd3875393d64\") " pod="openshift-apiserver/apiserver-76f77b778f-zkx8b" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.663006 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-zgw8m" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.663005 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/3ef9e285-88a8-499d-8fb8-e4c882336e68-images\") pod \"machine-api-operator-5694c8668f-qtkc8\" (UID: \"3ef9e285-88a8-499d-8fb8-e4c882336e68\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-qtkc8" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.663295 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ccb9403f-6f1d-4bd9-a18b-b90404cff1a0-serving-cert\") pod \"apiserver-7bbb656c7d-p29lm\" (UID: \"ccb9403f-6f1d-4bd9-a18b-b90404cff1a0\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-p29lm" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.663386 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/b070600e-8a6f-4bb9-a1c2-e763f55d90eb-console-config\") pod \"console-f9d7485db-qptnb\" (UID: \"b070600e-8a6f-4bb9-a1c2-e763f55d90eb\") " pod="openshift-console/console-f9d7485db-qptnb" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.663413 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/af87df13-9164-4e5e-99d4-5cbd9dfe80a5-config\") pod \"console-operator-58897d9998-ctv89\" (UID: \"af87df13-9164-4e5e-99d4-5cbd9dfe80a5\") " pod="openshift-console-operator/console-operator-58897d9998-ctv89" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.663436 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c54z8\" (UniqueName: \"kubernetes.io/projected/fcc64739-2fd2-4413-a19e-0bc14dd883d6-kube-api-access-c54z8\") pod \"oauth-openshift-558db77b4-wdppr\" (UID: \"fcc64739-2fd2-4413-a19e-0bc14dd883d6\") " pod="openshift-authentication/oauth-openshift-558db77b4-wdppr" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.663459 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/daf6a704-5857-419a-bfaa-e2af3a05d386-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-84k56\" (UID: \"daf6a704-5857-419a-bfaa-e2af3a05d386\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-84k56" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.663509 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"kube-api-access-dfjfh\" (UniqueName: \"kubernetes.io/projected/832859dc-03b3-4b2b-9a40-75372ebb38d9-kube-api-access-dfjfh\") pod \"openshift-apiserver-operator-796bbdcf4f-mnrsb\" (UID: \"832859dc-03b3-4b2b-9a40-75372ebb38d9\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-mnrsb" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.663541 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/fcc64739-2fd2-4413-a19e-0bc14dd883d6-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-wdppr\" (UID: \"fcc64739-2fd2-4413-a19e-0bc14dd883d6\") " pod="openshift-authentication/oauth-openshift-558db77b4-wdppr" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.663584 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tp227\" (UniqueName: \"kubernetes.io/projected/0a3473c0-d321-4be8-860f-bd51210cb58f-kube-api-access-tp227\") pod \"cluster-image-registry-operator-dc59b4c8b-ln7rp\" (UID: \"0a3473c0-d321-4be8-860f-bd51210cb58f\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-ln7rp" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.663607 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/ccb9403f-6f1d-4bd9-a18b-b90404cff1a0-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-p29lm\" (UID: \"ccb9403f-6f1d-4bd9-a18b-b90404cff1a0\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-p29lm" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.663624 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ca4c081d-896c-4cc8-9656-59364376de35-serving-cert\") pod \"controller-manager-879f6c89f-g44f6\" (UID: \"ca4c081d-896c-4cc8-9656-59364376de35\") " pod="openshift-controller-manager/controller-manager-879f6c89f-g44f6" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.663641 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ba0745e4-7610-44e2-9a65-cd3875393d64-config\") pod \"apiserver-76f77b778f-zkx8b\" (UID: \"ba0745e4-7610-44e2-9a65-cd3875393d64\") " pod="openshift-apiserver/apiserver-76f77b778f-zkx8b" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.663657 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/fcc64739-2fd2-4413-a19e-0bc14dd883d6-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-wdppr\" (UID: \"fcc64739-2fd2-4413-a19e-0bc14dd883d6\") " pod="openshift-authentication/oauth-openshift-558db77b4-wdppr" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.663679 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/fcc64739-2fd2-4413-a19e-0bc14dd883d6-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-wdppr\" (UID: \"fcc64739-2fd2-4413-a19e-0bc14dd883d6\") " pod="openshift-authentication/oauth-openshift-558db77b4-wdppr" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.663708 4787 reconciler_common.go:218] "operationExecutor.MountVolume started 
for volume \"kube-api-access-b4wk4\" (UniqueName: \"kubernetes.io/projected/ccb9403f-6f1d-4bd9-a18b-b90404cff1a0-kube-api-access-b4wk4\") pod \"apiserver-7bbb656c7d-p29lm\" (UID: \"ccb9403f-6f1d-4bd9-a18b-b90404cff1a0\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-p29lm" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.663730 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/287286a8-80b2-4c95-948d-a096153d8e51-machine-approver-tls\") pod \"machine-approver-56656f9798-tfh8x\" (UID: \"287286a8-80b2-4c95-948d-a096153d8e51\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-tfh8x" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.663753 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/ccb9403f-6f1d-4bd9-a18b-b90404cff1a0-encryption-config\") pod \"apiserver-7bbb656c7d-p29lm\" (UID: \"ccb9403f-6f1d-4bd9-a18b-b90404cff1a0\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-p29lm" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.663771 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/ba0745e4-7610-44e2-9a65-cd3875393d64-etcd-serving-ca\") pod \"apiserver-76f77b778f-zkx8b\" (UID: \"ba0745e4-7610-44e2-9a65-cd3875393d64\") " pod="openshift-apiserver/apiserver-76f77b778f-zkx8b" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.663791 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/fcc64739-2fd2-4413-a19e-0bc14dd883d6-audit-dir\") pod \"oauth-openshift-558db77b4-wdppr\" (UID: \"fcc64739-2fd2-4413-a19e-0bc14dd883d6\") " pod="openshift-authentication/oauth-openshift-558db77b4-wdppr" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.663809 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mjkkn\" (UniqueName: \"kubernetes.io/projected/daf6a704-5857-419a-bfaa-e2af3a05d386-kube-api-access-mjkkn\") pod \"authentication-operator-69f744f599-84k56\" (UID: \"daf6a704-5857-419a-bfaa-e2af3a05d386\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-84k56" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.663851 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/b070600e-8a6f-4bb9-a1c2-e763f55d90eb-console-oauth-config\") pod \"console-f9d7485db-qptnb\" (UID: \"b070600e-8a6f-4bb9-a1c2-e763f55d90eb\") " pod="openshift-console/console-f9d7485db-qptnb" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.663873 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/b070600e-8a6f-4bb9-a1c2-e763f55d90eb-oauth-serving-cert\") pod \"console-f9d7485db-qptnb\" (UID: \"b070600e-8a6f-4bb9-a1c2-e763f55d90eb\") " pod="openshift-console/console-f9d7485db-qptnb" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.663899 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5k9vf\" (UniqueName: 
\"kubernetes.io/projected/af87df13-9164-4e5e-99d4-5cbd9dfe80a5-kube-api-access-5k9vf\") pod \"console-operator-58897d9998-ctv89\" (UID: \"af87df13-9164-4e5e-99d4-5cbd9dfe80a5\") " pod="openshift-console-operator/console-operator-58897d9998-ctv89" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.663916 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/0a3473c0-d321-4be8-860f-bd51210cb58f-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-ln7rp\" (UID: \"0a3473c0-d321-4be8-860f-bd51210cb58f\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-ln7rp" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.663935 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mvmrj\" (UniqueName: \"kubernetes.io/projected/287286a8-80b2-4c95-948d-a096153d8e51-kube-api-access-mvmrj\") pod \"machine-approver-56656f9798-tfh8x\" (UID: \"287286a8-80b2-4c95-948d-a096153d8e51\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-tfh8x" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.664171 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/ccb9403f-6f1d-4bd9-a18b-b90404cff1a0-audit-policies\") pod \"apiserver-7bbb656c7d-p29lm\" (UID: \"ccb9403f-6f1d-4bd9-a18b-b90404cff1a0\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-p29lm" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.664276 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/fcc64739-2fd2-4413-a19e-0bc14dd883d6-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-wdppr\" (UID: \"fcc64739-2fd2-4413-a19e-0bc14dd883d6\") " pod="openshift-authentication/oauth-openshift-558db77b4-wdppr" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.664296 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/daf6a704-5857-419a-bfaa-e2af3a05d386-serving-cert\") pod \"authentication-operator-69f744f599-84k56\" (UID: \"daf6a704-5857-419a-bfaa-e2af3a05d386\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-84k56" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.664316 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/b070600e-8a6f-4bb9-a1c2-e763f55d90eb-console-serving-cert\") pod \"console-f9d7485db-qptnb\" (UID: \"b070600e-8a6f-4bb9-a1c2-e763f55d90eb\") " pod="openshift-console/console-f9d7485db-qptnb" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.664339 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/fcc64739-2fd2-4413-a19e-0bc14dd883d6-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-wdppr\" (UID: \"fcc64739-2fd2-4413-a19e-0bc14dd883d6\") " pod="openshift-authentication/oauth-openshift-558db77b4-wdppr" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.664357 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5v9n9\" 
(UniqueName: \"kubernetes.io/projected/301e3f1a-19c5-47a7-b85d-81676098f971-kube-api-access-5v9n9\") pod \"downloads-7954f5f757-xwx4w\" (UID: \"301e3f1a-19c5-47a7-b85d-81676098f971\") " pod="openshift-console/downloads-7954f5f757-xwx4w" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.664373 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/daf6a704-5857-419a-bfaa-e2af3a05d386-service-ca-bundle\") pod \"authentication-operator-69f744f599-84k56\" (UID: \"daf6a704-5857-419a-bfaa-e2af3a05d386\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-84k56" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.664422 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/832859dc-03b3-4b2b-9a40-75372ebb38d9-config\") pod \"openshift-apiserver-operator-796bbdcf4f-mnrsb\" (UID: \"832859dc-03b3-4b2b-9a40-75372ebb38d9\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-mnrsb" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.664447 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/a1672707-9776-4274-9bdd-4f1dabf83038-available-featuregates\") pod \"openshift-config-operator-7777fb866f-hn2j2\" (UID: \"a1672707-9776-4274-9bdd-4f1dabf83038\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-hn2j2" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.664473 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/fcc64739-2fd2-4413-a19e-0bc14dd883d6-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-wdppr\" (UID: \"fcc64739-2fd2-4413-a19e-0bc14dd883d6\") " pod="openshift-authentication/oauth-openshift-558db77b4-wdppr" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.664491 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0259775f-1fef-486a-bc17-4638e38ed83f-serving-cert\") pod \"route-controller-manager-6576b87f9c-dlz8t\" (UID: \"0259775f-1fef-486a-bc17-4638e38ed83f\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-dlz8t" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.664533 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/ca4c081d-896c-4cc8-9656-59364376de35-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-g44f6\" (UID: \"ca4c081d-896c-4cc8-9656-59364376de35\") " pod="openshift-controller-manager/controller-manager-879f6c89f-g44f6" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.664592 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/ba0745e4-7610-44e2-9a65-cd3875393d64-etcd-client\") pod \"apiserver-76f77b778f-zkx8b\" (UID: \"ba0745e4-7610-44e2-9a65-cd3875393d64\") " pod="openshift-apiserver/apiserver-76f77b778f-zkx8b" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.665075 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"serving-cert\" (UniqueName: \"kubernetes.io/secret/a1672707-9776-4274-9bdd-4f1dabf83038-serving-cert\") pod \"openshift-config-operator-7777fb866f-hn2j2\" (UID: \"a1672707-9776-4274-9bdd-4f1dabf83038\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-hn2j2" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.665131 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/af87df13-9164-4e5e-99d4-5cbd9dfe80a5-trusted-ca\") pod \"console-operator-58897d9998-ctv89\" (UID: \"af87df13-9164-4e5e-99d4-5cbd9dfe80a5\") " pod="openshift-console-operator/console-operator-58897d9998-ctv89" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.665151 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9cbr9\" (UniqueName: \"kubernetes.io/projected/b070600e-8a6f-4bb9-a1c2-e763f55d90eb-kube-api-access-9cbr9\") pod \"console-f9d7485db-qptnb\" (UID: \"b070600e-8a6f-4bb9-a1c2-e763f55d90eb\") " pod="openshift-console/console-f9d7485db-qptnb" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.665200 4787 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-j6mnr"] Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.665216 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/fcc64739-2fd2-4413-a19e-0bc14dd883d6-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-wdppr\" (UID: \"fcc64739-2fd2-4413-a19e-0bc14dd883d6\") " pod="openshift-authentication/oauth-openshift-558db77b4-wdppr" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.665235 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ca4c081d-896c-4cc8-9656-59364376de35-config\") pod \"controller-manager-879f6c89f-g44f6\" (UID: \"ca4c081d-896c-4cc8-9656-59364376de35\") " pod="openshift-controller-manager/controller-manager-879f6c89f-g44f6" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.665255 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3ef9e285-88a8-499d-8fb8-e4c882336e68-config\") pod \"machine-api-operator-5694c8668f-qtkc8\" (UID: \"3ef9e285-88a8-499d-8fb8-e4c882336e68\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-qtkc8" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.665273 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/287286a8-80b2-4c95-948d-a096153d8e51-auth-proxy-config\") pod \"machine-approver-56656f9798-tfh8x\" (UID: \"287286a8-80b2-4c95-948d-a096153d8e51\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-tfh8x" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.665295 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/ba0745e4-7610-44e2-9a65-cd3875393d64-image-import-ca\") pod \"apiserver-76f77b778f-zkx8b\" (UID: \"ba0745e4-7610-44e2-9a65-cd3875393d64\") " pod="openshift-apiserver/apiserver-76f77b778f-zkx8b" Jan 27 07:53:54 crc kubenswrapper[4787]: 
I0127 07:53:54.665314 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/fcc64739-2fd2-4413-a19e-0bc14dd883d6-audit-policies\") pod \"oauth-openshift-558db77b4-wdppr\" (UID: \"fcc64739-2fd2-4413-a19e-0bc14dd883d6\") " pod="openshift-authentication/oauth-openshift-558db77b4-wdppr" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.665331 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/287286a8-80b2-4c95-948d-a096153d8e51-config\") pod \"machine-approver-56656f9798-tfh8x\" (UID: \"287286a8-80b2-4c95-948d-a096153d8e51\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-tfh8x" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.665356 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/7c9507a4-925b-418e-b824-f338cd69a66e-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-q44w4\" (UID: \"7c9507a4-925b-418e-b824-f338cd69a66e\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-q44w4" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.665379 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7kgzr\" (UniqueName: \"kubernetes.io/projected/3ef9e285-88a8-499d-8fb8-e4c882336e68-kube-api-access-7kgzr\") pod \"machine-api-operator-5694c8668f-qtkc8\" (UID: \"3ef9e285-88a8-499d-8fb8-e4c882336e68\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-qtkc8" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.665400 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/ccb9403f-6f1d-4bd9-a18b-b90404cff1a0-etcd-client\") pod \"apiserver-7bbb656c7d-p29lm\" (UID: \"ccb9403f-6f1d-4bd9-a18b-b90404cff1a0\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-p29lm" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.665416 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/ba0745e4-7610-44e2-9a65-cd3875393d64-audit-dir\") pod \"apiserver-76f77b778f-zkx8b\" (UID: \"ba0745e4-7610-44e2-9a65-cd3875393d64\") " pod="openshift-apiserver/apiserver-76f77b778f-zkx8b" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.665433 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/0a3473c0-d321-4be8-860f-bd51210cb58f-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-ln7rp\" (UID: \"0a3473c0-d321-4be8-860f-bd51210cb58f\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-ln7rp" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.665597 4787 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-wv4z7"] Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.665958 4787 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-m7czl"] Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.666197 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" 
(UniqueName: \"kubernetes.io/configmap/ccb9403f-6f1d-4bd9-a18b-b90404cff1a0-audit-policies\") pod \"apiserver-7bbb656c7d-p29lm\" (UID: \"ccb9403f-6f1d-4bd9-a18b-b90404cff1a0\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-p29lm" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.666324 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/ccb9403f-6f1d-4bd9-a18b-b90404cff1a0-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-p29lm\" (UID: \"ccb9403f-6f1d-4bd9-a18b-b90404cff1a0\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-p29lm" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.666344 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-j6mnr" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.666475 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-m7czl" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.666570 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-wv4z7" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.668480 4787 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-cqdk9"] Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.669019 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-cqdk9" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.671743 4787 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-vl4tj"] Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.672292 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-vl4tj" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.673835 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-qptnb"] Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.675936 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/ccb9403f-6f1d-4bd9-a18b-b90404cff1a0-encryption-config\") pod \"apiserver-7bbb656c7d-p29lm\" (UID: \"ccb9403f-6f1d-4bd9-a18b-b90404cff1a0\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-p29lm" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.676080 4787 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-vm6dc"] Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.676376 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/ccb9403f-6f1d-4bd9-a18b-b90404cff1a0-etcd-client\") pod \"apiserver-7bbb656c7d-p29lm\" (UID: \"ccb9403f-6f1d-4bd9-a18b-b90404cff1a0\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-p29lm" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.676812 4787 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-vm6dc" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.677661 4787 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-8b5jc"] Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.678613 4787 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-wdvfw"] Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.678963 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-8b5jc" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.679420 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-wdvfw" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.680364 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-serving-cert" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.683179 4787 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-cgsgg"] Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.684079 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-cgsgg" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.686167 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ccb9403f-6f1d-4bd9-a18b-b90404cff1a0-serving-cert\") pod \"apiserver-7bbb656c7d-p29lm\" (UID: \"ccb9403f-6f1d-4bd9-a18b-b90404cff1a0\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-p29lm" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.686559 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-wdppr"] Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.688169 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-ctv89"] Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.689759 4787 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-4xj5z"] Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.690780 4787 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29491665-p955p"] Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.691104 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-4xj5z" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.691263 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-zgw8m"] Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.691397 4787 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29491665-p955p" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.692174 4787 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-d9n62"] Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.693642 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-d9n62" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.698029 4787 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-h4cr4"] Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.698861 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-client" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.700154 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-h4cr4" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.700419 4787 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-h5d4g"] Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.701090 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-h5d4g" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.701398 4787 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-server-9xlcr"] Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.702228 4787 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-xf9dv"] Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.705758 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-xf9dv" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.708689 4787 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-9xlcr" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.713361 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-qtkc8"] Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.713529 4787 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress/router-default-5444994796-jzdds"] Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.718869 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-operator-config" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.737141 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-mnzjr"] Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.737214 4787 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-tx6fd"] Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.738326 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-t6xzm"] Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.738350 4787 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-crvsg"] Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.738940 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-crvsg" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.739226 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress/router-default-5444994796-jzdds" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.740253 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-ca-bundle" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.740332 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-tx6fd" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.741913 4787 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-kqzdm"] Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.742814 4787 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-kqzdm" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.753099 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-d44bj"] Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.753170 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-vl4tj"] Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.753756 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-m7czl"] Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.755432 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-wv4z7"] Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.756793 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-service-ca-bundle" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.757143 4787 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/dns-default-rkzlx"] Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.758116 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/dns-default-rkzlx" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.758499 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-cgsgg"] Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.759623 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-vm6dc"] Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.761216 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-8b5jc"] Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.762329 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-cqdk9"] Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.763614 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29491665-p955p"] Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.764743 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-h4cr4"] Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.766174 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/287286a8-80b2-4c95-948d-a096153d8e51-config\") pod \"machine-approver-56656f9798-tfh8x\" (UID: \"287286a8-80b2-4c95-948d-a096153d8e51\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-tfh8x" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.766286 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/7c9507a4-925b-418e-b824-f338cd69a66e-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-q44w4\" (UID: \"7c9507a4-925b-418e-b824-f338cd69a66e\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-q44w4" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.766363 4787 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/ba0745e4-7610-44e2-9a65-cd3875393d64-image-import-ca\") pod \"apiserver-76f77b778f-zkx8b\" (UID: \"ba0745e4-7610-44e2-9a65-cd3875393d64\") " pod="openshift-apiserver/apiserver-76f77b778f-zkx8b" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.766453 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/fcc64739-2fd2-4413-a19e-0bc14dd883d6-audit-policies\") pod \"oauth-openshift-558db77b4-wdppr\" (UID: \"fcc64739-2fd2-4413-a19e-0bc14dd883d6\") " pod="openshift-authentication/oauth-openshift-558db77b4-wdppr" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.766573 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7kgzr\" (UniqueName: \"kubernetes.io/projected/3ef9e285-88a8-499d-8fb8-e4c882336e68-kube-api-access-7kgzr\") pod \"machine-api-operator-5694c8668f-qtkc8\" (UID: \"3ef9e285-88a8-499d-8fb8-e4c882336e68\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-qtkc8" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.766701 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/f063d15e-fe05-46d3-8304-72da67594ea6-etcd-client\") pod \"etcd-operator-b45778765-mnzjr\" (UID: \"f063d15e-fe05-46d3-8304-72da67594ea6\") " pod="openshift-etcd-operator/etcd-operator-b45778765-mnzjr" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.766791 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/ba0745e4-7610-44e2-9a65-cd3875393d64-audit-dir\") pod \"apiserver-76f77b778f-zkx8b\" (UID: \"ba0745e4-7610-44e2-9a65-cd3875393d64\") " pod="openshift-apiserver/apiserver-76f77b778f-zkx8b" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.766889 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/0a3473c0-d321-4be8-860f-bd51210cb58f-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-ln7rp\" (UID: \"0a3473c0-d321-4be8-860f-bd51210cb58f\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-ln7rp" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.766982 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f063d15e-fe05-46d3-8304-72da67594ea6-serving-cert\") pod \"etcd-operator-b45778765-mnzjr\" (UID: \"f063d15e-fe05-46d3-8304-72da67594ea6\") " pod="openshift-etcd-operator/etcd-operator-b45778765-mnzjr" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.767091 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/2572ea5c-d57f-4bbc-b7a8-a72fcd4cced5-metrics-tls\") pod \"ingress-operator-5b745b69d9-cqdk9\" (UID: \"2572ea5c-d57f-4bbc-b7a8-a72fcd4cced5\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-cqdk9" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.767193 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/3ef9e285-88a8-499d-8fb8-e4c882336e68-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-qtkc8\" (UID: 
\"3ef9e285-88a8-499d-8fb8-e4c882336e68\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-qtkc8" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.767276 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l85r5\" (UniqueName: \"kubernetes.io/projected/c7ed2e1c-c815-45e4-a5a1-e9006de2e8b3-kube-api-access-l85r5\") pod \"machine-config-controller-84d6567774-vl4tj\" (UID: \"c7ed2e1c-c815-45e4-a5a1-e9006de2e8b3\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-vl4tj" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.767360 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/832859dc-03b3-4b2b-9a40-75372ebb38d9-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-mnrsb\" (UID: \"832859dc-03b3-4b2b-9a40-75372ebb38d9\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-mnrsb" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.767430 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/0259775f-1fef-486a-bc17-4638e38ed83f-client-ca\") pod \"route-controller-manager-6576b87f9c-dlz8t\" (UID: \"0259775f-1fef-486a-bc17-4638e38ed83f\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-dlz8t" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.767518 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/f063d15e-fe05-46d3-8304-72da67594ea6-etcd-ca\") pod \"etcd-operator-b45778765-mnzjr\" (UID: \"f063d15e-fe05-46d3-8304-72da67594ea6\") " pod="openshift-etcd-operator/etcd-operator-b45778765-mnzjr" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.767627 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/4c622447-70d1-4b27-b09c-6fa6402f632c-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-4xj5z\" (UID: \"4c622447-70d1-4b27-b09c-6fa6402f632c\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-4xj5z" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.767744 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s5qpv\" (UniqueName: \"kubernetes.io/projected/a1672707-9776-4274-9bdd-4f1dabf83038-kube-api-access-s5qpv\") pod \"openshift-config-operator-7777fb866f-hn2j2\" (UID: \"a1672707-9776-4274-9bdd-4f1dabf83038\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-hn2j2" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.767844 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/2572ea5c-d57f-4bbc-b7a8-a72fcd4cced5-trusted-ca\") pod \"ingress-operator-5b745b69d9-cqdk9\" (UID: \"2572ea5c-d57f-4bbc-b7a8-a72fcd4cced5\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-cqdk9" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.767956 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/fcc64739-2fd2-4413-a19e-0bc14dd883d6-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-wdppr\" 
(UID: \"fcc64739-2fd2-4413-a19e-0bc14dd883d6\") " pod="openshift-authentication/oauth-openshift-558db77b4-wdppr" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.768056 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0259775f-1fef-486a-bc17-4638e38ed83f-config\") pod \"route-controller-manager-6576b87f9c-dlz8t\" (UID: \"0259775f-1fef-486a-bc17-4638e38ed83f\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-dlz8t" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.768164 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/0a3473c0-d321-4be8-860f-bd51210cb58f-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-ln7rp\" (UID: \"0a3473c0-d321-4be8-860f-bd51210cb58f\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-ln7rp" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.768262 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/c7ed2e1c-c815-45e4-a5a1-e9006de2e8b3-proxy-tls\") pod \"machine-config-controller-84d6567774-vl4tj\" (UID: \"c7ed2e1c-c815-45e4-a5a1-e9006de2e8b3\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-vl4tj" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.768361 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ba0745e4-7610-44e2-9a65-cd3875393d64-serving-cert\") pod \"apiserver-76f77b778f-zkx8b\" (UID: \"ba0745e4-7610-44e2-9a65-cd3875393d64\") " pod="openshift-apiserver/apiserver-76f77b778f-zkx8b" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.768470 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/fcc64739-2fd2-4413-a19e-0bc14dd883d6-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-wdppr\" (UID: \"fcc64739-2fd2-4413-a19e-0bc14dd883d6\") " pod="openshift-authentication/oauth-openshift-558db77b4-wdppr" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.768695 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/daf6a704-5857-419a-bfaa-e2af3a05d386-config\") pod \"authentication-operator-69f744f599-84k56\" (UID: \"daf6a704-5857-419a-bfaa-e2af3a05d386\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-84k56" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.770171 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/b070600e-8a6f-4bb9-a1c2-e763f55d90eb-service-ca\") pod \"console-f9d7485db-qptnb\" (UID: \"b070600e-8a6f-4bb9-a1c2-e763f55d90eb\") " pod="openshift-console/console-f9d7485db-qptnb" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.770250 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/ba0745e4-7610-44e2-9a65-cd3875393d64-audit-dir\") pod \"apiserver-76f77b778f-zkx8b\" (UID: \"ba0745e4-7610-44e2-9a65-cd3875393d64\") " pod="openshift-apiserver/apiserver-76f77b778f-zkx8b" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.770263 4787 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/ba0745e4-7610-44e2-9a65-cd3875393d64-audit\") pod \"apiserver-76f77b778f-zkx8b\" (UID: \"ba0745e4-7610-44e2-9a65-cd3875393d64\") " pod="openshift-apiserver/apiserver-76f77b778f-zkx8b" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.770348 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nfw62\" (UniqueName: \"kubernetes.io/projected/7c9507a4-925b-418e-b824-f338cd69a66e-kube-api-access-nfw62\") pod \"cluster-samples-operator-665b6dd947-q44w4\" (UID: \"7c9507a4-925b-418e-b824-f338cd69a66e\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-q44w4" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.770388 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/ba0745e4-7610-44e2-9a65-cd3875393d64-encryption-config\") pod \"apiserver-76f77b778f-zkx8b\" (UID: \"ba0745e4-7610-44e2-9a65-cd3875393d64\") " pod="openshift-apiserver/apiserver-76f77b778f-zkx8b" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.770421 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/fcc64739-2fd2-4413-a19e-0bc14dd883d6-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-wdppr\" (UID: \"fcc64739-2fd2-4413-a19e-0bc14dd883d6\") " pod="openshift-authentication/oauth-openshift-558db77b4-wdppr" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.770451 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b070600e-8a6f-4bb9-a1c2-e763f55d90eb-trusted-ca-bundle\") pod \"console-f9d7485db-qptnb\" (UID: \"b070600e-8a6f-4bb9-a1c2-e763f55d90eb\") " pod="openshift-console/console-f9d7485db-qptnb" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.770483 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/c7ed2e1c-c815-45e4-a5a1-e9006de2e8b3-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-vl4tj\" (UID: \"c7ed2e1c-c815-45e4-a5a1-e9006de2e8b3\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-vl4tj" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.770520 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kxmsq\" (UniqueName: \"kubernetes.io/projected/ca4c081d-896c-4cc8-9656-59364376de35-kube-api-access-kxmsq\") pod \"controller-manager-879f6c89f-g44f6\" (UID: \"ca4c081d-896c-4cc8-9656-59364376de35\") " pod="openshift-controller-manager/controller-manager-879f6c89f-g44f6" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.770583 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/ba0745e4-7610-44e2-9a65-cd3875393d64-trusted-ca-bundle\") pod \"apiserver-76f77b778f-zkx8b\" (UID: \"ba0745e4-7610-44e2-9a65-cd3875393d64\") " pod="openshift-apiserver/apiserver-76f77b778f-zkx8b" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.770613 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/ca4c081d-896c-4cc8-9656-59364376de35-client-ca\") pod 
\"controller-manager-879f6c89f-g44f6\" (UID: \"ca4c081d-896c-4cc8-9656-59364376de35\") " pod="openshift-controller-manager/controller-manager-879f6c89f-g44f6" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.770660 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/ba0745e4-7610-44e2-9a65-cd3875393d64-node-pullsecrets\") pod \"apiserver-76f77b778f-zkx8b\" (UID: \"ba0745e4-7610-44e2-9a65-cd3875393d64\") " pod="openshift-apiserver/apiserver-76f77b778f-zkx8b" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.770693 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/fcc64739-2fd2-4413-a19e-0bc14dd883d6-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-wdppr\" (UID: \"fcc64739-2fd2-4413-a19e-0bc14dd883d6\") " pod="openshift-authentication/oauth-openshift-558db77b4-wdppr" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.770720 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-stktz\" (UniqueName: \"kubernetes.io/projected/0259775f-1fef-486a-bc17-4638e38ed83f-kube-api-access-stktz\") pod \"route-controller-manager-6576b87f9c-dlz8t\" (UID: \"0259775f-1fef-486a-bc17-4638e38ed83f\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-dlz8t" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.770771 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m7r6d\" (UniqueName: \"kubernetes.io/projected/ba0745e4-7610-44e2-9a65-cd3875393d64-kube-api-access-m7r6d\") pod \"apiserver-76f77b778f-zkx8b\" (UID: \"ba0745e4-7610-44e2-9a65-cd3875393d64\") " pod="openshift-apiserver/apiserver-76f77b778f-zkx8b" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.770799 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/3ef9e285-88a8-499d-8fb8-e4c882336e68-images\") pod \"machine-api-operator-5694c8668f-qtkc8\" (UID: \"3ef9e285-88a8-499d-8fb8-e4c882336e68\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-qtkc8" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.770843 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/af87df13-9164-4e5e-99d4-5cbd9dfe80a5-serving-cert\") pod \"console-operator-58897d9998-ctv89\" (UID: \"af87df13-9164-4e5e-99d4-5cbd9dfe80a5\") " pod="openshift-console-operator/console-operator-58897d9998-ctv89" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.770876 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/b070600e-8a6f-4bb9-a1c2-e763f55d90eb-console-config\") pod \"console-f9d7485db-qptnb\" (UID: \"b070600e-8a6f-4bb9-a1c2-e763f55d90eb\") " pod="openshift-console/console-f9d7485db-qptnb" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.770910 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vvsj6\" (UniqueName: \"kubernetes.io/projected/c71976d9-f7ec-4258-8bf5-08a526607fd9-kube-api-access-vvsj6\") pod \"control-plane-machine-set-operator-78cbb6b69f-wdvfw\" (UID: \"c71976d9-f7ec-4258-8bf5-08a526607fd9\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-wdvfw" Jan 27 
07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.770938 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c54z8\" (UniqueName: \"kubernetes.io/projected/fcc64739-2fd2-4413-a19e-0bc14dd883d6-kube-api-access-c54z8\") pod \"oauth-openshift-558db77b4-wdppr\" (UID: \"fcc64739-2fd2-4413-a19e-0bc14dd883d6\") " pod="openshift-authentication/oauth-openshift-558db77b4-wdppr" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.770966 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/daf6a704-5857-419a-bfaa-e2af3a05d386-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-84k56\" (UID: \"daf6a704-5857-419a-bfaa-e2af3a05d386\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-84k56" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.771046 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dfjfh\" (UniqueName: \"kubernetes.io/projected/832859dc-03b3-4b2b-9a40-75372ebb38d9-kube-api-access-dfjfh\") pod \"openshift-apiserver-operator-796bbdcf4f-mnrsb\" (UID: \"832859dc-03b3-4b2b-9a40-75372ebb38d9\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-mnrsb" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.771080 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3b9018b2-8e63-4c80-8c71-cc9fa7ddb853-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-zgw8m\" (UID: \"3b9018b2-8e63-4c80-8c71-cc9fa7ddb853\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-zgw8m" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.771118 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/af87df13-9164-4e5e-99d4-5cbd9dfe80a5-config\") pod \"console-operator-58897d9998-ctv89\" (UID: \"af87df13-9164-4e5e-99d4-5cbd9dfe80a5\") " pod="openshift-console-operator/console-operator-58897d9998-ctv89" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.771151 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/fcc64739-2fd2-4413-a19e-0bc14dd883d6-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-wdppr\" (UID: \"fcc64739-2fd2-4413-a19e-0bc14dd883d6\") " pod="openshift-authentication/oauth-openshift-558db77b4-wdppr" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.771184 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tp227\" (UniqueName: \"kubernetes.io/projected/0a3473c0-d321-4be8-860f-bd51210cb58f-kube-api-access-tp227\") pod \"cluster-image-registry-operator-dc59b4c8b-ln7rp\" (UID: \"0a3473c0-d321-4be8-860f-bd51210cb58f\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-ln7rp" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.771217 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ca4c081d-896c-4cc8-9656-59364376de35-serving-cert\") pod \"controller-manager-879f6c89f-g44f6\" (UID: \"ca4c081d-896c-4cc8-9656-59364376de35\") " pod="openshift-controller-manager/controller-manager-879f6c89f-g44f6" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 
07:53:54.771246 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ba0745e4-7610-44e2-9a65-cd3875393d64-config\") pod \"apiserver-76f77b778f-zkx8b\" (UID: \"ba0745e4-7610-44e2-9a65-cd3875393d64\") " pod="openshift-apiserver/apiserver-76f77b778f-zkx8b" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.771273 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/fcc64739-2fd2-4413-a19e-0bc14dd883d6-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-wdppr\" (UID: \"fcc64739-2fd2-4413-a19e-0bc14dd883d6\") " pod="openshift-authentication/oauth-openshift-558db77b4-wdppr" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.771310 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/fcc64739-2fd2-4413-a19e-0bc14dd883d6-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-wdppr\" (UID: \"fcc64739-2fd2-4413-a19e-0bc14dd883d6\") " pod="openshift-authentication/oauth-openshift-558db77b4-wdppr" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.771340 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/287286a8-80b2-4c95-948d-a096153d8e51-machine-approver-tls\") pod \"machine-approver-56656f9798-tfh8x\" (UID: \"287286a8-80b2-4c95-948d-a096153d8e51\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-tfh8x" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.771382 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/ba0745e4-7610-44e2-9a65-cd3875393d64-etcd-serving-ca\") pod \"apiserver-76f77b778f-zkx8b\" (UID: \"ba0745e4-7610-44e2-9a65-cd3875393d64\") " pod="openshift-apiserver/apiserver-76f77b778f-zkx8b" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.771414 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/fcc64739-2fd2-4413-a19e-0bc14dd883d6-audit-dir\") pod \"oauth-openshift-558db77b4-wdppr\" (UID: \"fcc64739-2fd2-4413-a19e-0bc14dd883d6\") " pod="openshift-authentication/oauth-openshift-558db77b4-wdppr" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.771442 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mjkkn\" (UniqueName: \"kubernetes.io/projected/daf6a704-5857-419a-bfaa-e2af3a05d386-kube-api-access-mjkkn\") pod \"authentication-operator-69f744f599-84k56\" (UID: \"daf6a704-5857-419a-bfaa-e2af3a05d386\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-84k56" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.771470 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/b070600e-8a6f-4bb9-a1c2-e763f55d90eb-console-oauth-config\") pod \"console-f9d7485db-qptnb\" (UID: \"b070600e-8a6f-4bb9-a1c2-e763f55d90eb\") " pod="openshift-console/console-f9d7485db-qptnb" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.771507 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: 
\"kubernetes.io/configmap/b070600e-8a6f-4bb9-a1c2-e763f55d90eb-oauth-serving-cert\") pod \"console-f9d7485db-qptnb\" (UID: \"b070600e-8a6f-4bb9-a1c2-e763f55d90eb\") " pod="openshift-console/console-f9d7485db-qptnb" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.771539 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/f063d15e-fe05-46d3-8304-72da67594ea6-etcd-service-ca\") pod \"etcd-operator-b45778765-mnzjr\" (UID: \"f063d15e-fe05-46d3-8304-72da67594ea6\") " pod="openshift-etcd-operator/etcd-operator-b45778765-mnzjr" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.771587 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/c71976d9-f7ec-4258-8bf5-08a526607fd9-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-wdvfw\" (UID: \"c71976d9-f7ec-4258-8bf5-08a526607fd9\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-wdvfw" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.771620 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5k9vf\" (UniqueName: \"kubernetes.io/projected/af87df13-9164-4e5e-99d4-5cbd9dfe80a5-kube-api-access-5k9vf\") pod \"console-operator-58897d9998-ctv89\" (UID: \"af87df13-9164-4e5e-99d4-5cbd9dfe80a5\") " pod="openshift-console-operator/console-operator-58897d9998-ctv89" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.771650 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/0a3473c0-d321-4be8-860f-bd51210cb58f-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-ln7rp\" (UID: \"0a3473c0-d321-4be8-860f-bd51210cb58f\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-ln7rp" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.771679 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mvmrj\" (UniqueName: \"kubernetes.io/projected/287286a8-80b2-4c95-948d-a096153d8e51-kube-api-access-mvmrj\") pod \"machine-approver-56656f9798-tfh8x\" (UID: \"287286a8-80b2-4c95-948d-a096153d8e51\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-tfh8x" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.771726 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t9rhv\" (UniqueName: \"kubernetes.io/projected/3b9018b2-8e63-4c80-8c71-cc9fa7ddb853-kube-api-access-t9rhv\") pod \"openshift-controller-manager-operator-756b6f6bc6-zgw8m\" (UID: \"3b9018b2-8e63-4c80-8c71-cc9fa7ddb853\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-zgw8m" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.771759 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/fcc64739-2fd2-4413-a19e-0bc14dd883d6-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-wdppr\" (UID: \"fcc64739-2fd2-4413-a19e-0bc14dd883d6\") " pod="openshift-authentication/oauth-openshift-558db77b4-wdppr" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.771788 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"serving-cert\" (UniqueName: \"kubernetes.io/secret/daf6a704-5857-419a-bfaa-e2af3a05d386-serving-cert\") pod \"authentication-operator-69f744f599-84k56\" (UID: \"daf6a704-5857-419a-bfaa-e2af3a05d386\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-84k56" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.771833 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/b070600e-8a6f-4bb9-a1c2-e763f55d90eb-console-serving-cert\") pod \"console-f9d7485db-qptnb\" (UID: \"b070600e-8a6f-4bb9-a1c2-e763f55d90eb\") " pod="openshift-console/console-f9d7485db-qptnb" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.771859 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/daf6a704-5857-419a-bfaa-e2af3a05d386-service-ca-bundle\") pod \"authentication-operator-69f744f599-84k56\" (UID: \"daf6a704-5857-419a-bfaa-e2af3a05d386\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-84k56" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.771886 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/832859dc-03b3-4b2b-9a40-75372ebb38d9-config\") pod \"openshift-apiserver-operator-796bbdcf4f-mnrsb\" (UID: \"832859dc-03b3-4b2b-9a40-75372ebb38d9\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-mnrsb" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.771914 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/a1672707-9776-4274-9bdd-4f1dabf83038-available-featuregates\") pod \"openshift-config-operator-7777fb866f-hn2j2\" (UID: \"a1672707-9776-4274-9bdd-4f1dabf83038\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-hn2j2" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.771942 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xrtv2\" (UniqueName: \"kubernetes.io/projected/52652793-dcbd-4568-a32e-b727818c61a8-kube-api-access-xrtv2\") pod \"kube-storage-version-migrator-operator-b67b599dd-d9n62\" (UID: \"52652793-dcbd-4568-a32e-b727818c61a8\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-d9n62" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.771984 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/fcc64739-2fd2-4413-a19e-0bc14dd883d6-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-wdppr\" (UID: \"fcc64739-2fd2-4413-a19e-0bc14dd883d6\") " pod="openshift-authentication/oauth-openshift-558db77b4-wdppr" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.772012 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5v9n9\" (UniqueName: \"kubernetes.io/projected/301e3f1a-19c5-47a7-b85d-81676098f971-kube-api-access-5v9n9\") pod \"downloads-7954f5f757-xwx4w\" (UID: \"301e3f1a-19c5-47a7-b85d-81676098f971\") " pod="openshift-console/downloads-7954f5f757-xwx4w" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.772041 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" 
(UniqueName: \"kubernetes.io/secret/fcc64739-2fd2-4413-a19e-0bc14dd883d6-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-wdppr\" (UID: \"fcc64739-2fd2-4413-a19e-0bc14dd883d6\") " pod="openshift-authentication/oauth-openshift-558db77b4-wdppr" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.772070 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0259775f-1fef-486a-bc17-4638e38ed83f-serving-cert\") pod \"route-controller-manager-6576b87f9c-dlz8t\" (UID: \"0259775f-1fef-486a-bc17-4638e38ed83f\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-dlz8t" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.772113 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8qvpd\" (UniqueName: \"kubernetes.io/projected/4c622447-70d1-4b27-b09c-6fa6402f632c-kube-api-access-8qvpd\") pod \"package-server-manager-789f6589d5-4xj5z\" (UID: \"4c622447-70d1-4b27-b09c-6fa6402f632c\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-4xj5z" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.772121 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-d9n62"] Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.766797 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/287286a8-80b2-4c95-948d-a096153d8e51-config\") pod \"machine-approver-56656f9798-tfh8x\" (UID: \"287286a8-80b2-4c95-948d-a096153d8e51\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-tfh8x" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.772193 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-wdvfw"] Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.772211 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-xf9dv"] Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.772141 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/52652793-dcbd-4568-a32e-b727818c61a8-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-d9n62\" (UID: \"52652793-dcbd-4568-a32e-b727818c61a8\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-d9n62" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.772297 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/ba0745e4-7610-44e2-9a65-cd3875393d64-etcd-client\") pod \"apiserver-76f77b778f-zkx8b\" (UID: \"ba0745e4-7610-44e2-9a65-cd3875393d64\") " pod="openshift-apiserver/apiserver-76f77b778f-zkx8b" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.772334 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/52652793-dcbd-4568-a32e-b727818c61a8-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-d9n62\" (UID: \"52652793-dcbd-4568-a32e-b727818c61a8\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-d9n62" 
Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.772367 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/ca4c081d-896c-4cc8-9656-59364376de35-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-g44f6\" (UID: \"ca4c081d-896c-4cc8-9656-59364376de35\") " pod="openshift-controller-manager/controller-manager-879f6c89f-g44f6" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.772390 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f063d15e-fe05-46d3-8304-72da67594ea6-config\") pod \"etcd-operator-b45778765-mnzjr\" (UID: \"f063d15e-fe05-46d3-8304-72da67594ea6\") " pod="openshift-etcd-operator/etcd-operator-b45778765-mnzjr" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.772414 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-49hhm\" (UniqueName: \"kubernetes.io/projected/2572ea5c-d57f-4bbc-b7a8-a72fcd4cced5-kube-api-access-49hhm\") pod \"ingress-operator-5b745b69d9-cqdk9\" (UID: \"2572ea5c-d57f-4bbc-b7a8-a72fcd4cced5\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-cqdk9" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.772441 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a1672707-9776-4274-9bdd-4f1dabf83038-serving-cert\") pod \"openshift-config-operator-7777fb866f-hn2j2\" (UID: \"a1672707-9776-4274-9bdd-4f1dabf83038\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-hn2j2" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.772463 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/af87df13-9164-4e5e-99d4-5cbd9dfe80a5-trusted-ca\") pod \"console-operator-58897d9998-ctv89\" (UID: \"af87df13-9164-4e5e-99d4-5cbd9dfe80a5\") " pod="openshift-console-operator/console-operator-58897d9998-ctv89" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.772486 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9cbr9\" (UniqueName: \"kubernetes.io/projected/b070600e-8a6f-4bb9-a1c2-e763f55d90eb-kube-api-access-9cbr9\") pod \"console-f9d7485db-qptnb\" (UID: \"b070600e-8a6f-4bb9-a1c2-e763f55d90eb\") " pod="openshift-console/console-f9d7485db-qptnb" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.772505 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/2572ea5c-d57f-4bbc-b7a8-a72fcd4cced5-bound-sa-token\") pod \"ingress-operator-5b745b69d9-cqdk9\" (UID: \"2572ea5c-d57f-4bbc-b7a8-a72fcd4cced5\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-cqdk9" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.772530 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3b9018b2-8e63-4c80-8c71-cc9fa7ddb853-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-zgw8m\" (UID: \"3b9018b2-8e63-4c80-8c71-cc9fa7ddb853\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-zgw8m" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.772608 4787 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/fcc64739-2fd2-4413-a19e-0bc14dd883d6-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-wdppr\" (UID: \"fcc64739-2fd2-4413-a19e-0bc14dd883d6\") " pod="openshift-authentication/oauth-openshift-558db77b4-wdppr" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.772630 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dgqps\" (UniqueName: \"kubernetes.io/projected/f063d15e-fe05-46d3-8304-72da67594ea6-kube-api-access-dgqps\") pod \"etcd-operator-b45778765-mnzjr\" (UID: \"f063d15e-fe05-46d3-8304-72da67594ea6\") " pod="openshift-etcd-operator/etcd-operator-b45778765-mnzjr" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.772656 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ca4c081d-896c-4cc8-9656-59364376de35-config\") pod \"controller-manager-879f6c89f-g44f6\" (UID: \"ca4c081d-896c-4cc8-9656-59364376de35\") " pod="openshift-controller-manager/controller-manager-879f6c89f-g44f6" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.772676 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3ef9e285-88a8-499d-8fb8-e4c882336e68-config\") pod \"machine-api-operator-5694c8668f-qtkc8\" (UID: \"3ef9e285-88a8-499d-8fb8-e4c882336e68\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-qtkc8" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.772697 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/287286a8-80b2-4c95-948d-a096153d8e51-auth-proxy-config\") pod \"machine-approver-56656f9798-tfh8x\" (UID: \"287286a8-80b2-4c95-948d-a096153d8e51\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-tfh8x" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.773628 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/287286a8-80b2-4c95-948d-a096153d8e51-auth-proxy-config\") pod \"machine-approver-56656f9798-tfh8x\" (UID: \"287286a8-80b2-4c95-948d-a096153d8e51\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-tfh8x" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.773631 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/ba0745e4-7610-44e2-9a65-cd3875393d64-audit\") pod \"apiserver-76f77b778f-zkx8b\" (UID: \"ba0745e4-7610-44e2-9a65-cd3875393d64\") " pod="openshift-apiserver/apiserver-76f77b778f-zkx8b" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.769749 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/ba0745e4-7610-44e2-9a65-cd3875393d64-image-import-ca\") pod \"apiserver-76f77b778f-zkx8b\" (UID: \"ba0745e4-7610-44e2-9a65-cd3875393d64\") " pod="openshift-apiserver/apiserver-76f77b778f-zkx8b" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.767323 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/fcc64739-2fd2-4413-a19e-0bc14dd883d6-audit-policies\") pod \"oauth-openshift-558db77b4-wdppr\" (UID: \"fcc64739-2fd2-4413-a19e-0bc14dd883d6\") " 
pod="openshift-authentication/oauth-openshift-558db77b4-wdppr" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.775804 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/3ef9e285-88a8-499d-8fb8-e4c882336e68-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-qtkc8\" (UID: \"3ef9e285-88a8-499d-8fb8-e4c882336e68\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-qtkc8" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.776044 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/b070600e-8a6f-4bb9-a1c2-e763f55d90eb-service-ca\") pod \"console-f9d7485db-qptnb\" (UID: \"b070600e-8a6f-4bb9-a1c2-e763f55d90eb\") " pod="openshift-console/console-f9d7485db-qptnb" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.769978 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/daf6a704-5857-419a-bfaa-e2af3a05d386-config\") pod \"authentication-operator-69f744f599-84k56\" (UID: \"daf6a704-5857-419a-bfaa-e2af3a05d386\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-84k56" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.776445 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/af87df13-9164-4e5e-99d4-5cbd9dfe80a5-trusted-ca\") pod \"console-operator-58897d9998-ctv89\" (UID: \"af87df13-9164-4e5e-99d4-5cbd9dfe80a5\") " pod="openshift-console-operator/console-operator-58897d9998-ctv89" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.776539 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/af87df13-9164-4e5e-99d4-5cbd9dfe80a5-config\") pod \"console-operator-58897d9998-ctv89\" (UID: \"af87df13-9164-4e5e-99d4-5cbd9dfe80a5\") " pod="openshift-console-operator/console-operator-58897d9998-ctv89" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.780224 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ca4c081d-896c-4cc8-9656-59364376de35-config\") pod \"controller-manager-879f6c89f-g44f6\" (UID: \"ca4c081d-896c-4cc8-9656-59364376de35\") " pod="openshift-controller-manager/controller-manager-879f6c89f-g44f6" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.780455 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/7c9507a4-925b-418e-b824-f338cd69a66e-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-q44w4\" (UID: \"7c9507a4-925b-418e-b824-f338cd69a66e\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-q44w4" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.780727 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3ef9e285-88a8-499d-8fb8-e4c882336e68-config\") pod \"machine-api-operator-5694c8668f-qtkc8\" (UID: \"3ef9e285-88a8-499d-8fb8-e4c882336e68\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-qtkc8" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.781002 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: 
\"kubernetes.io/secret/fcc64739-2fd2-4413-a19e-0bc14dd883d6-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-wdppr\" (UID: \"fcc64739-2fd2-4413-a19e-0bc14dd883d6\") " pod="openshift-authentication/oauth-openshift-558db77b4-wdppr" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.781094 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/fcc64739-2fd2-4413-a19e-0bc14dd883d6-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-wdppr\" (UID: \"fcc64739-2fd2-4413-a19e-0bc14dd883d6\") " pod="openshift-authentication/oauth-openshift-558db77b4-wdppr" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.781335 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a1672707-9776-4274-9bdd-4f1dabf83038-serving-cert\") pod \"openshift-config-operator-7777fb866f-hn2j2\" (UID: \"a1672707-9776-4274-9bdd-4f1dabf83038\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-hn2j2" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.781388 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"kube-root-ca.crt" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.781810 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/daf6a704-5857-419a-bfaa-e2af3a05d386-service-ca-bundle\") pod \"authentication-operator-69f744f599-84k56\" (UID: \"daf6a704-5857-419a-bfaa-e2af3a05d386\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-84k56" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.781868 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/832859dc-03b3-4b2b-9a40-75372ebb38d9-config\") pod \"openshift-apiserver-operator-796bbdcf4f-mnrsb\" (UID: \"832859dc-03b3-4b2b-9a40-75372ebb38d9\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-mnrsb" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.781931 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/a1672707-9776-4274-9bdd-4f1dabf83038-available-featuregates\") pod \"openshift-config-operator-7777fb866f-hn2j2\" (UID: \"a1672707-9776-4274-9bdd-4f1dabf83038\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-hn2j2" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.782075 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/fcc64739-2fd2-4413-a19e-0bc14dd883d6-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-wdppr\" (UID: \"fcc64739-2fd2-4413-a19e-0bc14dd883d6\") " pod="openshift-authentication/oauth-openshift-558db77b4-wdppr" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.782541 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/fcc64739-2fd2-4413-a19e-0bc14dd883d6-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-wdppr\" (UID: \"fcc64739-2fd2-4413-a19e-0bc14dd883d6\") " pod="openshift-authentication/oauth-openshift-558db77b4-wdppr" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.782834 4787 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b070600e-8a6f-4bb9-a1c2-e763f55d90eb-trusted-ca-bundle\") pod \"console-f9d7485db-qptnb\" (UID: \"b070600e-8a6f-4bb9-a1c2-e763f55d90eb\") " pod="openshift-console/console-f9d7485db-qptnb" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.783378 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/0a3473c0-d321-4be8-860f-bd51210cb58f-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-ln7rp\" (UID: \"0a3473c0-d321-4be8-860f-bd51210cb58f\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-ln7rp" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.783901 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/fcc64739-2fd2-4413-a19e-0bc14dd883d6-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-wdppr\" (UID: \"fcc64739-2fd2-4413-a19e-0bc14dd883d6\") " pod="openshift-authentication/oauth-openshift-558db77b4-wdppr" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.784190 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/ca4c081d-896c-4cc8-9656-59364376de35-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-g44f6\" (UID: \"ca4c081d-896c-4cc8-9656-59364376de35\") " pod="openshift-controller-manager/controller-manager-879f6c89f-g44f6" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.784387 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ba0745e4-7610-44e2-9a65-cd3875393d64-config\") pod \"apiserver-76f77b778f-zkx8b\" (UID: \"ba0745e4-7610-44e2-9a65-cd3875393d64\") " pod="openshift-apiserver/apiserver-76f77b778f-zkx8b" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.784391 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/0259775f-1fef-486a-bc17-4638e38ed83f-client-ca\") pod \"route-controller-manager-6576b87f9c-dlz8t\" (UID: \"0259775f-1fef-486a-bc17-4638e38ed83f\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-dlz8t" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.785015 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0259775f-1fef-486a-bc17-4638e38ed83f-config\") pod \"route-controller-manager-6576b87f9c-dlz8t\" (UID: \"0259775f-1fef-486a-bc17-4638e38ed83f\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-dlz8t" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.785749 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/ba0745e4-7610-44e2-9a65-cd3875393d64-etcd-serving-ca\") pod \"apiserver-76f77b778f-zkx8b\" (UID: \"ba0745e4-7610-44e2-9a65-cd3875393d64\") " pod="openshift-apiserver/apiserver-76f77b778f-zkx8b" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.785752 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0259775f-1fef-486a-bc17-4638e38ed83f-serving-cert\") pod \"route-controller-manager-6576b87f9c-dlz8t\" (UID: \"0259775f-1fef-486a-bc17-4638e38ed83f\") " 
pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-dlz8t" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.785820 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/b070600e-8a6f-4bb9-a1c2-e763f55d90eb-console-config\") pod \"console-f9d7485db-qptnb\" (UID: \"b070600e-8a6f-4bb9-a1c2-e763f55d90eb\") " pod="openshift-console/console-f9d7485db-qptnb" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.786097 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/fcc64739-2fd2-4413-a19e-0bc14dd883d6-audit-dir\") pod \"oauth-openshift-558db77b4-wdppr\" (UID: \"fcc64739-2fd2-4413-a19e-0bc14dd883d6\") " pod="openshift-authentication/oauth-openshift-558db77b4-wdppr" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.786228 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/287286a8-80b2-4c95-948d-a096153d8e51-machine-approver-tls\") pod \"machine-approver-56656f9798-tfh8x\" (UID: \"287286a8-80b2-4c95-948d-a096153d8e51\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-tfh8x" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.787041 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/fcc64739-2fd2-4413-a19e-0bc14dd883d6-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-wdppr\" (UID: \"fcc64739-2fd2-4413-a19e-0bc14dd883d6\") " pod="openshift-authentication/oauth-openshift-558db77b4-wdppr" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.787220 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/ba0745e4-7610-44e2-9a65-cd3875393d64-trusted-ca-bundle\") pod \"apiserver-76f77b778f-zkx8b\" (UID: \"ba0745e4-7610-44e2-9a65-cd3875393d64\") " pod="openshift-apiserver/apiserver-76f77b778f-zkx8b" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.787568 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ca4c081d-896c-4cc8-9656-59364376de35-serving-cert\") pod \"controller-manager-879f6c89f-g44f6\" (UID: \"ca4c081d-896c-4cc8-9656-59364376de35\") " pod="openshift-controller-manager/controller-manager-879f6c89f-g44f6" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.787721 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/fcc64739-2fd2-4413-a19e-0bc14dd883d6-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-wdppr\" (UID: \"fcc64739-2fd2-4413-a19e-0bc14dd883d6\") " pod="openshift-authentication/oauth-openshift-558db77b4-wdppr" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.788204 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/b070600e-8a6f-4bb9-a1c2-e763f55d90eb-oauth-serving-cert\") pod \"console-f9d7485db-qptnb\" (UID: \"b070600e-8a6f-4bb9-a1c2-e763f55d90eb\") " pod="openshift-console/console-f9d7485db-qptnb" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.788630 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: 
\"kubernetes.io/configmap/ca4c081d-896c-4cc8-9656-59364376de35-client-ca\") pod \"controller-manager-879f6c89f-g44f6\" (UID: \"ca4c081d-896c-4cc8-9656-59364376de35\") " pod="openshift-controller-manager/controller-manager-879f6c89f-g44f6" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.788683 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-crvsg"] Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.788718 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-h5d4g"] Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.789112 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/fcc64739-2fd2-4413-a19e-0bc14dd883d6-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-wdppr\" (UID: \"fcc64739-2fd2-4413-a19e-0bc14dd883d6\") " pod="openshift-authentication/oauth-openshift-558db77b4-wdppr" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.789202 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/ba0745e4-7610-44e2-9a65-cd3875393d64-node-pullsecrets\") pod \"apiserver-76f77b778f-zkx8b\" (UID: \"ba0745e4-7610-44e2-9a65-cd3875393d64\") " pod="openshift-apiserver/apiserver-76f77b778f-zkx8b" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.789171 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/fcc64739-2fd2-4413-a19e-0bc14dd883d6-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-wdppr\" (UID: \"fcc64739-2fd2-4413-a19e-0bc14dd883d6\") " pod="openshift-authentication/oauth-openshift-558db77b4-wdppr" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.789128 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/ba0745e4-7610-44e2-9a65-cd3875393d64-etcd-client\") pod \"apiserver-76f77b778f-zkx8b\" (UID: \"ba0745e4-7610-44e2-9a65-cd3875393d64\") " pod="openshift-apiserver/apiserver-76f77b778f-zkx8b" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.790016 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/daf6a704-5857-419a-bfaa-e2af3a05d386-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-84k56\" (UID: \"daf6a704-5857-419a-bfaa-e2af3a05d386\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-84k56" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.790078 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-j6mnr"] Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.790029 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/3ef9e285-88a8-499d-8fb8-e4c882336e68-images\") pod \"machine-api-operator-5694c8668f-qtkc8\" (UID: \"3ef9e285-88a8-499d-8fb8-e4c882336e68\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-qtkc8" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.790278 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/832859dc-03b3-4b2b-9a40-75372ebb38d9-serving-cert\") pod 
\"openshift-apiserver-operator-796bbdcf4f-mnrsb\" (UID: \"832859dc-03b3-4b2b-9a40-75372ebb38d9\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-mnrsb" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.790323 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/fcc64739-2fd2-4413-a19e-0bc14dd883d6-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-wdppr\" (UID: \"fcc64739-2fd2-4413-a19e-0bc14dd883d6\") " pod="openshift-authentication/oauth-openshift-558db77b4-wdppr" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.790612 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/ba0745e4-7610-44e2-9a65-cd3875393d64-encryption-config\") pod \"apiserver-76f77b778f-zkx8b\" (UID: \"ba0745e4-7610-44e2-9a65-cd3875393d64\") " pod="openshift-apiserver/apiserver-76f77b778f-zkx8b" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.791022 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-4xj5z"] Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.791145 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/af87df13-9164-4e5e-99d4-5cbd9dfe80a5-serving-cert\") pod \"console-operator-58897d9998-ctv89\" (UID: \"af87df13-9164-4e5e-99d4-5cbd9dfe80a5\") " pod="openshift-console-operator/console-operator-58897d9998-ctv89" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.791792 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/b070600e-8a6f-4bb9-a1c2-e763f55d90eb-console-oauth-config\") pod \"console-f9d7485db-qptnb\" (UID: \"b070600e-8a6f-4bb9-a1c2-e763f55d90eb\") " pod="openshift-console/console-f9d7485db-qptnb" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.792852 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/fcc64739-2fd2-4413-a19e-0bc14dd883d6-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-wdppr\" (UID: \"fcc64739-2fd2-4413-a19e-0bc14dd883d6\") " pod="openshift-authentication/oauth-openshift-558db77b4-wdppr" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.792940 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ba0745e4-7610-44e2-9a65-cd3875393d64-serving-cert\") pod \"apiserver-76f77b778f-zkx8b\" (UID: \"ba0745e4-7610-44e2-9a65-cd3875393d64\") " pod="openshift-apiserver/apiserver-76f77b778f-zkx8b" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.793357 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/b070600e-8a6f-4bb9-a1c2-e763f55d90eb-console-serving-cert\") pod \"console-f9d7485db-qptnb\" (UID: \"b070600e-8a6f-4bb9-a1c2-e763f55d90eb\") " pod="openshift-console/console-f9d7485db-qptnb" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.794615 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-kqzdm"] Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.795532 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" 
(UniqueName: \"kubernetes.io/secret/daf6a704-5857-419a-bfaa-e2af3a05d386-serving-cert\") pod \"authentication-operator-69f744f599-84k56\" (UID: \"daf6a704-5857-419a-bfaa-e2af3a05d386\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-84k56" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.796320 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/0a3473c0-d321-4be8-860f-bd51210cb58f-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-ln7rp\" (UID: \"0a3473c0-d321-4be8-860f-bd51210cb58f\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-ln7rp" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.796541 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"registry-dockercfg-kzzsd" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.804625 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-rkzlx"] Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.804722 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-tx6fd"] Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.806662 4787 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-canary/ingress-canary-znxn2"] Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.807571 4787 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-mwnxd"] Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.807784 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-canary/ingress-canary-znxn2" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.808907 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-znxn2"] Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.809029 4787 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-mwnxd" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.812269 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-mwnxd"] Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.817540 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-tls" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.836932 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"installation-pull-secrets" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.873582 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/f063d15e-fe05-46d3-8304-72da67594ea6-etcd-client\") pod \"etcd-operator-b45778765-mnzjr\" (UID: \"f063d15e-fe05-46d3-8304-72da67594ea6\") " pod="openshift-etcd-operator/etcd-operator-b45778765-mnzjr" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.873677 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f063d15e-fe05-46d3-8304-72da67594ea6-serving-cert\") pod \"etcd-operator-b45778765-mnzjr\" (UID: \"f063d15e-fe05-46d3-8304-72da67594ea6\") " pod="openshift-etcd-operator/etcd-operator-b45778765-mnzjr" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.874018 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/2572ea5c-d57f-4bbc-b7a8-a72fcd4cced5-metrics-tls\") pod \"ingress-operator-5b745b69d9-cqdk9\" (UID: \"2572ea5c-d57f-4bbc-b7a8-a72fcd4cced5\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-cqdk9" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.874109 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l85r5\" (UniqueName: \"kubernetes.io/projected/c7ed2e1c-c815-45e4-a5a1-e9006de2e8b3-kube-api-access-l85r5\") pod \"machine-config-controller-84d6567774-vl4tj\" (UID: \"c7ed2e1c-c815-45e4-a5a1-e9006de2e8b3\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-vl4tj" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.874178 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/f063d15e-fe05-46d3-8304-72da67594ea6-etcd-ca\") pod \"etcd-operator-b45778765-mnzjr\" (UID: \"f063d15e-fe05-46d3-8304-72da67594ea6\") " pod="openshift-etcd-operator/etcd-operator-b45778765-mnzjr" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.874209 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/4c622447-70d1-4b27-b09c-6fa6402f632c-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-4xj5z\" (UID: \"4c622447-70d1-4b27-b09c-6fa6402f632c\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-4xj5z" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.874291 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/2572ea5c-d57f-4bbc-b7a8-a72fcd4cced5-trusted-ca\") pod \"ingress-operator-5b745b69d9-cqdk9\" (UID: \"2572ea5c-d57f-4bbc-b7a8-a72fcd4cced5\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-cqdk9" Jan 27 
07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.874351 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/c7ed2e1c-c815-45e4-a5a1-e9006de2e8b3-proxy-tls\") pod \"machine-config-controller-84d6567774-vl4tj\" (UID: \"c7ed2e1c-c815-45e4-a5a1-e9006de2e8b3\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-vl4tj" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.874440 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/c7ed2e1c-c815-45e4-a5a1-e9006de2e8b3-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-vl4tj\" (UID: \"c7ed2e1c-c815-45e4-a5a1-e9006de2e8b3\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-vl4tj" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.874539 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vvsj6\" (UniqueName: \"kubernetes.io/projected/c71976d9-f7ec-4258-8bf5-08a526607fd9-kube-api-access-vvsj6\") pod \"control-plane-machine-set-operator-78cbb6b69f-wdvfw\" (UID: \"c71976d9-f7ec-4258-8bf5-08a526607fd9\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-wdvfw" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.874606 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3b9018b2-8e63-4c80-8c71-cc9fa7ddb853-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-zgw8m\" (UID: \"3b9018b2-8e63-4c80-8c71-cc9fa7ddb853\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-zgw8m" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.874707 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/f063d15e-fe05-46d3-8304-72da67594ea6-etcd-service-ca\") pod \"etcd-operator-b45778765-mnzjr\" (UID: \"f063d15e-fe05-46d3-8304-72da67594ea6\") " pod="openshift-etcd-operator/etcd-operator-b45778765-mnzjr" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.874737 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/c71976d9-f7ec-4258-8bf5-08a526607fd9-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-wdvfw\" (UID: \"c71976d9-f7ec-4258-8bf5-08a526607fd9\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-wdvfw" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.874792 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t9rhv\" (UniqueName: \"kubernetes.io/projected/3b9018b2-8e63-4c80-8c71-cc9fa7ddb853-kube-api-access-t9rhv\") pod \"openshift-controller-manager-operator-756b6f6bc6-zgw8m\" (UID: \"3b9018b2-8e63-4c80-8c71-cc9fa7ddb853\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-zgw8m" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.874834 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xrtv2\" (UniqueName: \"kubernetes.io/projected/52652793-dcbd-4568-a32e-b727818c61a8-kube-api-access-xrtv2\") pod \"kube-storage-version-migrator-operator-b67b599dd-d9n62\" (UID: 
\"52652793-dcbd-4568-a32e-b727818c61a8\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-d9n62" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.874867 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8qvpd\" (UniqueName: \"kubernetes.io/projected/4c622447-70d1-4b27-b09c-6fa6402f632c-kube-api-access-8qvpd\") pod \"package-server-manager-789f6589d5-4xj5z\" (UID: \"4c622447-70d1-4b27-b09c-6fa6402f632c\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-4xj5z" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.874890 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/52652793-dcbd-4568-a32e-b727818c61a8-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-d9n62\" (UID: \"52652793-dcbd-4568-a32e-b727818c61a8\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-d9n62" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.874918 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/52652793-dcbd-4568-a32e-b727818c61a8-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-d9n62\" (UID: \"52652793-dcbd-4568-a32e-b727818c61a8\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-d9n62" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.874948 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f063d15e-fe05-46d3-8304-72da67594ea6-config\") pod \"etcd-operator-b45778765-mnzjr\" (UID: \"f063d15e-fe05-46d3-8304-72da67594ea6\") " pod="openshift-etcd-operator/etcd-operator-b45778765-mnzjr" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.874979 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-49hhm\" (UniqueName: \"kubernetes.io/projected/2572ea5c-d57f-4bbc-b7a8-a72fcd4cced5-kube-api-access-49hhm\") pod \"ingress-operator-5b745b69d9-cqdk9\" (UID: \"2572ea5c-d57f-4bbc-b7a8-a72fcd4cced5\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-cqdk9" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.875016 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/2572ea5c-d57f-4bbc-b7a8-a72fcd4cced5-bound-sa-token\") pod \"ingress-operator-5b745b69d9-cqdk9\" (UID: \"2572ea5c-d57f-4bbc-b7a8-a72fcd4cced5\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-cqdk9" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.875044 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3b9018b2-8e63-4c80-8c71-cc9fa7ddb853-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-zgw8m\" (UID: \"3b9018b2-8e63-4c80-8c71-cc9fa7ddb853\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-zgw8m" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.875081 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dgqps\" (UniqueName: \"kubernetes.io/projected/f063d15e-fe05-46d3-8304-72da67594ea6-kube-api-access-dgqps\") pod \"etcd-operator-b45778765-mnzjr\" (UID: 
\"f063d15e-fe05-46d3-8304-72da67594ea6\") " pod="openshift-etcd-operator/etcd-operator-b45778765-mnzjr" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.875440 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/f063d15e-fe05-46d3-8304-72da67594ea6-etcd-ca\") pod \"etcd-operator-b45778765-mnzjr\" (UID: \"f063d15e-fe05-46d3-8304-72da67594ea6\") " pod="openshift-etcd-operator/etcd-operator-b45778765-mnzjr" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.876080 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/f063d15e-fe05-46d3-8304-72da67594ea6-etcd-service-ca\") pod \"etcd-operator-b45778765-mnzjr\" (UID: \"f063d15e-fe05-46d3-8304-72da67594ea6\") " pod="openshift-etcd-operator/etcd-operator-b45778765-mnzjr" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.876832 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f063d15e-fe05-46d3-8304-72da67594ea6-config\") pod \"etcd-operator-b45778765-mnzjr\" (UID: \"f063d15e-fe05-46d3-8304-72da67594ea6\") " pod="openshift-etcd-operator/etcd-operator-b45778765-mnzjr" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.877154 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-service-ca.crt" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.877197 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/c7ed2e1c-c815-45e4-a5a1-e9006de2e8b3-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-vl4tj\" (UID: \"c7ed2e1c-c815-45e4-a5a1-e9006de2e8b3\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-vl4tj" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.878778 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f063d15e-fe05-46d3-8304-72da67594ea6-serving-cert\") pod \"etcd-operator-b45778765-mnzjr\" (UID: \"f063d15e-fe05-46d3-8304-72da67594ea6\") " pod="openshift-etcd-operator/etcd-operator-b45778765-mnzjr" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.879596 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/f063d15e-fe05-46d3-8304-72da67594ea6-etcd-client\") pod \"etcd-operator-b45778765-mnzjr\" (UID: \"f063d15e-fe05-46d3-8304-72da67594ea6\") " pod="openshift-etcd-operator/etcd-operator-b45778765-mnzjr" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.898264 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-dockercfg-vw8fw" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.916964 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-serving-cert" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.928383 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3b9018b2-8e63-4c80-8c71-cc9fa7ddb853-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-zgw8m\" (UID: \"3b9018b2-8e63-4c80-8c71-cc9fa7ddb853\") " 
pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-zgw8m" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.937257 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-config" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.947622 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3b9018b2-8e63-4c80-8c71-cc9fa7ddb853-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-zgw8m\" (UID: \"3b9018b2-8e63-4c80-8c71-cc9fa7ddb853\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-zgw8m" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.958896 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"kube-root-ca.crt" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.993762 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b4wk4\" (UniqueName: \"kubernetes.io/projected/ccb9403f-6f1d-4bd9-a18b-b90404cff1a0-kube-api-access-b4wk4\") pod \"apiserver-7bbb656c7d-p29lm\" (UID: \"ccb9403f-6f1d-4bd9-a18b-b90404cff1a0\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-p29lm" Jan 27 07:53:54 crc kubenswrapper[4787]: I0127 07:53:54.997952 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-root-ca.crt" Jan 27 07:53:55 crc kubenswrapper[4787]: I0127 07:53:55.017140 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-serving-cert" Jan 27 07:53:55 crc kubenswrapper[4787]: I0127 07:53:55.037097 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-dockercfg-gkqpw" Jan 27 07:53:55 crc kubenswrapper[4787]: I0127 07:53:55.057992 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-config" Jan 27 07:53:55 crc kubenswrapper[4787]: I0127 07:53:55.076954 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-operator-dockercfg-98p87" Jan 27 07:53:55 crc kubenswrapper[4787]: I0127 07:53:55.097622 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"machine-config-operator-images" Jan 27 07:53:55 crc kubenswrapper[4787]: I0127 07:53:55.113014 4787 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-p29lm" Jan 27 07:53:55 crc kubenswrapper[4787]: I0127 07:53:55.121922 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mco-proxy-tls" Jan 27 07:53:55 crc kubenswrapper[4787]: I0127 07:53:55.138139 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-dockercfg-qt55r" Jan 27 07:53:55 crc kubenswrapper[4787]: I0127 07:53:55.157794 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"kube-root-ca.crt" Jan 27 07:53:55 crc kubenswrapper[4787]: I0127 07:53:55.176856 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"kube-scheduler-operator-serving-cert" Jan 27 07:53:55 crc kubenswrapper[4787]: I0127 07:53:55.196827 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-config" Jan 27 07:53:55 crc kubenswrapper[4787]: I0127 07:53:55.235058 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"ingress-operator-dockercfg-7lnqk" Jan 27 07:53:55 crc kubenswrapper[4787]: I0127 07:53:55.247536 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"trusted-ca" Jan 27 07:53:55 crc kubenswrapper[4787]: I0127 07:53:55.257843 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/2572ea5c-d57f-4bbc-b7a8-a72fcd4cced5-trusted-ca\") pod \"ingress-operator-5b745b69d9-cqdk9\" (UID: \"2572ea5c-d57f-4bbc-b7a8-a72fcd4cced5\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-cqdk9" Jan 27 07:53:55 crc kubenswrapper[4787]: I0127 07:53:55.260305 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"openshift-service-ca.crt" Jan 27 07:53:55 crc kubenswrapper[4787]: I0127 07:53:55.277064 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"metrics-tls" Jan 27 07:53:55 crc kubenswrapper[4787]: I0127 07:53:55.295462 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/2572ea5c-d57f-4bbc-b7a8-a72fcd4cced5-metrics-tls\") pod \"ingress-operator-5b745b69d9-cqdk9\" (UID: \"2572ea5c-d57f-4bbc-b7a8-a72fcd4cced5\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-cqdk9" Jan 27 07:53:55 crc kubenswrapper[4787]: I0127 07:53:55.304636 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"kube-root-ca.crt" Jan 27 07:53:55 crc kubenswrapper[4787]: I0127 07:53:55.317409 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mcc-proxy-tls" Jan 27 07:53:55 crc kubenswrapper[4787]: I0127 07:53:55.330736 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/c7ed2e1c-c815-45e4-a5a1-e9006de2e8b3-proxy-tls\") pod \"machine-config-controller-84d6567774-vl4tj\" (UID: \"c7ed2e1c-c815-45e4-a5a1-e9006de2e8b3\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-vl4tj" Jan 27 07:53:55 crc kubenswrapper[4787]: I0127 07:53:55.339535 4787 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-machine-config-operator"/"machine-config-controller-dockercfg-c2lfx" Jan 27 07:53:55 crc kubenswrapper[4787]: I0127 07:53:55.351241 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-p29lm"] Jan 27 07:53:55 crc kubenswrapper[4787]: I0127 07:53:55.357918 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"pprof-cert" Jan 27 07:53:55 crc kubenswrapper[4787]: W0127 07:53:55.359887 4787 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podccb9403f_6f1d_4bd9_a18b_b90404cff1a0.slice/crio-f13f654e3279ce9d1ef580ebd7cf1cd02ccc09d334fb2570726c7eaafa2c7dda WatchSource:0}: Error finding container f13f654e3279ce9d1ef580ebd7cf1cd02ccc09d334fb2570726c7eaafa2c7dda: Status 404 returned error can't find the container with id f13f654e3279ce9d1ef580ebd7cf1cd02ccc09d334fb2570726c7eaafa2c7dda Jan 27 07:53:55 crc kubenswrapper[4787]: I0127 07:53:55.376673 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serving-cert" Jan 27 07:53:55 crc kubenswrapper[4787]: I0127 07:53:55.398494 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serviceaccount-dockercfg-rq7zk" Jan 27 07:53:55 crc kubenswrapper[4787]: I0127 07:53:55.418316 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"openshift-service-ca.crt" Jan 27 07:53:55 crc kubenswrapper[4787]: I0127 07:53:55.436403 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"kube-root-ca.crt" Jan 27 07:53:55 crc kubenswrapper[4787]: I0127 07:53:55.457439 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-serving-cert" Jan 27 07:53:55 crc kubenswrapper[4787]: I0127 07:53:55.478034 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-config" Jan 27 07:53:55 crc kubenswrapper[4787]: I0127 07:53:55.497106 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-dockercfg-x57mr" Jan 27 07:53:55 crc kubenswrapper[4787]: I0127 07:53:55.518040 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-root-ca.crt" Jan 27 07:53:55 crc kubenswrapper[4787]: I0127 07:53:55.538329 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-dockercfg-k9rxt" Jan 27 07:53:55 crc kubenswrapper[4787]: I0127 07:53:55.557997 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-tls" Jan 27 07:53:55 crc kubenswrapper[4787]: I0127 07:53:55.569673 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/c71976d9-f7ec-4258-8bf5-08a526607fd9-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-wdvfw\" (UID: \"c71976d9-f7ec-4258-8bf5-08a526607fd9\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-wdvfw" Jan 27 07:53:55 crc kubenswrapper[4787]: I0127 07:53:55.577126 4787 
reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"catalog-operator-serving-cert" Jan 27 07:53:55 crc kubenswrapper[4787]: I0127 07:53:55.598041 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"package-server-manager-serving-cert" Jan 27 07:53:55 crc kubenswrapper[4787]: I0127 07:53:55.608370 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/4c622447-70d1-4b27-b09c-6fa6402f632c-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-4xj5z\" (UID: \"4c622447-70d1-4b27-b09c-6fa6402f632c\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-4xj5z" Jan 27 07:53:55 crc kubenswrapper[4787]: I0127 07:53:55.625614 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Jan 27 07:53:55 crc kubenswrapper[4787]: I0127 07:53:55.636928 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Jan 27 07:53:55 crc kubenswrapper[4787]: I0127 07:53:55.657058 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"openshift-service-ca.crt" Jan 27 07:53:55 crc kubenswrapper[4787]: I0127 07:53:55.677394 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"kube-storage-version-migrator-operator-dockercfg-2bh8d" Jan 27 07:53:55 crc kubenswrapper[4787]: I0127 07:53:55.695108 4787 request.go:700] Waited for 1.000917057s due to client-side throttling, not priority and fairness, request: GET:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-storage-version-migrator-operator/secrets?fieldSelector=metadata.name%3Dserving-cert&limit=500&resourceVersion=0 Jan 27 07:53:55 crc kubenswrapper[4787]: I0127 07:53:55.697664 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"serving-cert" Jan 27 07:53:55 crc kubenswrapper[4787]: I0127 07:53:55.710054 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/52652793-dcbd-4568-a32e-b727818c61a8-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-d9n62\" (UID: \"52652793-dcbd-4568-a32e-b727818c61a8\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-d9n62" Jan 27 07:53:55 crc kubenswrapper[4787]: I0127 07:53:55.717520 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"config" Jan 27 07:53:55 crc kubenswrapper[4787]: I0127 07:53:55.727085 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/52652793-dcbd-4568-a32e-b727818c61a8-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-d9n62\" (UID: \"52652793-dcbd-4568-a32e-b727818c61a8\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-d9n62" Jan 27 07:53:55 crc kubenswrapper[4787]: I0127 07:53:55.738021 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"kube-root-ca.crt" Jan 27 07:53:55 crc 
kubenswrapper[4787]: I0127 07:53:55.757712 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"openshift-service-ca.crt" Jan 27 07:53:55 crc kubenswrapper[4787]: I0127 07:53:55.777040 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator"/"kube-storage-version-migrator-sa-dockercfg-5xfcg" Jan 27 07:53:55 crc kubenswrapper[4787]: I0127 07:53:55.796504 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"kube-root-ca.crt" Jan 27 07:53:55 crc kubenswrapper[4787]: I0127 07:53:55.817682 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"kube-root-ca.crt" Jan 27 07:53:55 crc kubenswrapper[4787]: I0127 07:53:55.837530 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-dockercfg-5nsgg" Jan 27 07:53:55 crc kubenswrapper[4787]: I0127 07:53:55.857032 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-metrics" Jan 27 07:53:55 crc kubenswrapper[4787]: I0127 07:53:55.876282 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"openshift-service-ca.crt" Jan 27 07:53:55 crc kubenswrapper[4787]: I0127 07:53:55.902367 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"marketplace-trusted-ca" Jan 27 07:53:55 crc kubenswrapper[4787]: I0127 07:53:55.903007 4787 generic.go:334] "Generic (PLEG): container finished" podID="ccb9403f-6f1d-4bd9-a18b-b90404cff1a0" containerID="4db902cdd724c33c548d97b5991fae101f4cbc5f095950a14187e89bb41c4acc" exitCode=0 Jan 27 07:53:55 crc kubenswrapper[4787]: I0127 07:53:55.903078 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-p29lm" event={"ID":"ccb9403f-6f1d-4bd9-a18b-b90404cff1a0","Type":"ContainerDied","Data":"4db902cdd724c33c548d97b5991fae101f4cbc5f095950a14187e89bb41c4acc"} Jan 27 07:53:55 crc kubenswrapper[4787]: I0127 07:53:55.903133 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-p29lm" event={"ID":"ccb9403f-6f1d-4bd9-a18b-b90404cff1a0","Type":"ContainerStarted","Data":"f13f654e3279ce9d1ef580ebd7cf1cd02ccc09d334fb2570726c7eaafa2c7dda"} Jan 27 07:53:55 crc kubenswrapper[4787]: I0127 07:53:55.917803 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"openshift-service-ca.crt" Jan 27 07:53:55 crc kubenswrapper[4787]: I0127 07:53:55.938370 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"service-ca-operator-dockercfg-rg9jl" Jan 27 07:53:55 crc kubenswrapper[4787]: I0127 07:53:55.957755 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"serving-cert" Jan 27 07:53:55 crc kubenswrapper[4787]: I0127 07:53:55.977672 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"service-ca-operator-config" Jan 27 07:53:55 crc kubenswrapper[4787]: I0127 07:53:55.996124 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"kube-root-ca.crt" Jan 27 07:53:56 crc kubenswrapper[4787]: I0127 07:53:56.018789 4787 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-machine-config-operator"/"machine-config-server-tls" Jan 27 07:53:56 crc kubenswrapper[4787]: I0127 07:53:56.037191 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-dockercfg-qx5rd" Jan 27 07:53:56 crc kubenswrapper[4787]: I0127 07:53:56.057841 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"node-bootstrapper-token" Jan 27 07:53:56 crc kubenswrapper[4787]: I0127 07:53:56.098153 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"signing-key" Jan 27 07:53:56 crc kubenswrapper[4787]: I0127 07:53:56.118473 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"service-ca-dockercfg-pn86c" Jan 27 07:53:56 crc kubenswrapper[4787]: I0127 07:53:56.137505 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"signing-cabundle" Jan 27 07:53:56 crc kubenswrapper[4787]: I0127 07:53:56.156897 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"kube-root-ca.crt" Jan 27 07:53:56 crc kubenswrapper[4787]: I0127 07:53:56.177234 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"openshift-service-ca.crt" Jan 27 07:53:56 crc kubenswrapper[4787]: I0127 07:53:56.196880 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"kube-root-ca.crt" Jan 27 07:53:56 crc kubenswrapper[4787]: I0127 07:53:56.217049 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-certs-default" Jan 27 07:53:56 crc kubenswrapper[4787]: I0127 07:53:56.238797 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-metrics-certs-default" Jan 27 07:53:56 crc kubenswrapper[4787]: I0127 07:53:56.258009 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-dockercfg-zdk86" Jan 27 07:53:56 crc kubenswrapper[4787]: I0127 07:53:56.276943 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"openshift-service-ca.crt" Jan 27 07:53:56 crc kubenswrapper[4787]: I0127 07:53:56.297639 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-stats-default" Jan 27 07:53:56 crc kubenswrapper[4787]: I0127 07:53:56.317624 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"service-ca-bundle" Jan 27 07:53:56 crc kubenswrapper[4787]: I0127 07:53:56.337743 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ac-dockercfg-9lkdf" Jan 27 07:53:56 crc kubenswrapper[4787]: I0127 07:53:56.357628 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-admission-controller-secret" Jan 27 07:53:56 crc kubenswrapper[4787]: I0127 07:53:56.379864 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"packageserver-service-cert" Jan 27 07:53:56 crc kubenswrapper[4787]: I0127 07:53:56.398493 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"dns-default" Jan 27 07:53:56 crc kubenswrapper[4787]: I0127 07:53:56.418247 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-dockercfg-jwfmh" Jan 27 07:53:56 crc kubenswrapper[4787]: I0127 07:53:56.438426 4787 
reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-default-metrics-tls" Jan 27 07:53:56 crc kubenswrapper[4787]: I0127 07:53:56.478545 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7kgzr\" (UniqueName: \"kubernetes.io/projected/3ef9e285-88a8-499d-8fb8-e4c882336e68-kube-api-access-7kgzr\") pod \"machine-api-operator-5694c8668f-qtkc8\" (UID: \"3ef9e285-88a8-499d-8fb8-e4c882336e68\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-qtkc8" Jan 27 07:53:56 crc kubenswrapper[4787]: I0127 07:53:56.507811 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/0a3473c0-d321-4be8-860f-bd51210cb58f-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-ln7rp\" (UID: \"0a3473c0-d321-4be8-860f-bd51210cb58f\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-ln7rp" Jan 27 07:53:56 crc kubenswrapper[4787]: I0127 07:53:56.517412 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9cbr9\" (UniqueName: \"kubernetes.io/projected/b070600e-8a6f-4bb9-a1c2-e763f55d90eb-kube-api-access-9cbr9\") pod \"console-f9d7485db-qptnb\" (UID: \"b070600e-8a6f-4bb9-a1c2-e763f55d90eb\") " pod="openshift-console/console-f9d7485db-qptnb" Jan 27 07:53:56 crc kubenswrapper[4787]: I0127 07:53:56.534714 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dfjfh\" (UniqueName: \"kubernetes.io/projected/832859dc-03b3-4b2b-9a40-75372ebb38d9-kube-api-access-dfjfh\") pod \"openshift-apiserver-operator-796bbdcf4f-mnrsb\" (UID: \"832859dc-03b3-4b2b-9a40-75372ebb38d9\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-mnrsb" Jan 27 07:53:56 crc kubenswrapper[4787]: I0127 07:53:56.564067 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mvmrj\" (UniqueName: \"kubernetes.io/projected/287286a8-80b2-4c95-948d-a096153d8e51-kube-api-access-mvmrj\") pod \"machine-approver-56656f9798-tfh8x\" (UID: \"287286a8-80b2-4c95-948d-a096153d8e51\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-tfh8x" Jan 27 07:53:56 crc kubenswrapper[4787]: I0127 07:53:56.566195 4787 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-qtkc8" Jan 27 07:53:56 crc kubenswrapper[4787]: I0127 07:53:56.573713 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s5qpv\" (UniqueName: \"kubernetes.io/projected/a1672707-9776-4274-9bdd-4f1dabf83038-kube-api-access-s5qpv\") pod \"openshift-config-operator-7777fb866f-hn2j2\" (UID: \"a1672707-9776-4274-9bdd-4f1dabf83038\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-hn2j2" Jan 27 07:53:56 crc kubenswrapper[4787]: I0127 07:53:56.594802 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5k9vf\" (UniqueName: \"kubernetes.io/projected/af87df13-9164-4e5e-99d4-5cbd9dfe80a5-kube-api-access-5k9vf\") pod \"console-operator-58897d9998-ctv89\" (UID: \"af87df13-9164-4e5e-99d4-5cbd9dfe80a5\") " pod="openshift-console-operator/console-operator-58897d9998-ctv89" Jan 27 07:53:56 crc kubenswrapper[4787]: I0127 07:53:56.616324 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5v9n9\" (UniqueName: \"kubernetes.io/projected/301e3f1a-19c5-47a7-b85d-81676098f971-kube-api-access-5v9n9\") pod \"downloads-7954f5f757-xwx4w\" (UID: \"301e3f1a-19c5-47a7-b85d-81676098f971\") " pod="openshift-console/downloads-7954f5f757-xwx4w" Jan 27 07:53:56 crc kubenswrapper[4787]: I0127 07:53:56.635272 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-hn2j2" Jan 27 07:53:56 crc kubenswrapper[4787]: I0127 07:53:56.645848 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tp227\" (UniqueName: \"kubernetes.io/projected/0a3473c0-d321-4be8-860f-bd51210cb58f-kube-api-access-tp227\") pod \"cluster-image-registry-operator-dc59b4c8b-ln7rp\" (UID: \"0a3473c0-d321-4be8-860f-bd51210cb58f\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-ln7rp" Jan 27 07:53:56 crc kubenswrapper[4787]: I0127 07:53:56.665662 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-stktz\" (UniqueName: \"kubernetes.io/projected/0259775f-1fef-486a-bc17-4638e38ed83f-kube-api-access-stktz\") pod \"route-controller-manager-6576b87f9c-dlz8t\" (UID: \"0259775f-1fef-486a-bc17-4638e38ed83f\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-dlz8t" Jan 27 07:53:56 crc kubenswrapper[4787]: I0127 07:53:56.685881 4787 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-mnrsb" Jan 27 07:53:56 crc kubenswrapper[4787]: I0127 07:53:56.693157 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nfw62\" (UniqueName: \"kubernetes.io/projected/7c9507a4-925b-418e-b824-f338cd69a66e-kube-api-access-nfw62\") pod \"cluster-samples-operator-665b6dd947-q44w4\" (UID: \"7c9507a4-925b-418e-b824-f338cd69a66e\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-q44w4" Jan 27 07:53:56 crc kubenswrapper[4787]: I0127 07:53:56.694857 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mjkkn\" (UniqueName: \"kubernetes.io/projected/daf6a704-5857-419a-bfaa-e2af3a05d386-kube-api-access-mjkkn\") pod \"authentication-operator-69f744f599-84k56\" (UID: \"daf6a704-5857-419a-bfaa-e2af3a05d386\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-84k56" Jan 27 07:53:56 crc kubenswrapper[4787]: I0127 07:53:56.697923 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-dlz8t" Jan 27 07:53:56 crc kubenswrapper[4787]: I0127 07:53:56.716582 4787 request.go:700] Waited for 1.927739054s due to client-side throttling, not priority and fairness, request: POST:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-controller-manager/serviceaccounts/openshift-controller-manager-sa/token Jan 27 07:53:56 crc kubenswrapper[4787]: I0127 07:53:56.726957 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m7r6d\" (UniqueName: \"kubernetes.io/projected/ba0745e4-7610-44e2-9a65-cd3875393d64-kube-api-access-m7r6d\") pod \"apiserver-76f77b778f-zkx8b\" (UID: \"ba0745e4-7610-44e2-9a65-cd3875393d64\") " pod="openshift-apiserver/apiserver-76f77b778f-zkx8b" Jan 27 07:53:56 crc kubenswrapper[4787]: I0127 07:53:56.739331 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kxmsq\" (UniqueName: \"kubernetes.io/projected/ca4c081d-896c-4cc8-9656-59364376de35-kube-api-access-kxmsq\") pod \"controller-manager-879f6c89f-g44f6\" (UID: \"ca4c081d-896c-4cc8-9656-59364376de35\") " pod="openshift-controller-manager/controller-manager-879f6c89f-g44f6" Jan 27 07:53:56 crc kubenswrapper[4787]: I0127 07:53:56.739957 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-qptnb" Jan 27 07:53:56 crc kubenswrapper[4787]: I0127 07:53:56.759490 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"default-dockercfg-2llfx" Jan 27 07:53:56 crc kubenswrapper[4787]: I0127 07:53:56.766254 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c54z8\" (UniqueName: \"kubernetes.io/projected/fcc64739-2fd2-4413-a19e-0bc14dd883d6-kube-api-access-c54z8\") pod \"oauth-openshift-558db77b4-wdppr\" (UID: \"fcc64739-2fd2-4413-a19e-0bc14dd883d6\") " pod="openshift-authentication/oauth-openshift-558db77b4-wdppr" Jan 27 07:53:56 crc kubenswrapper[4787]: I0127 07:53:56.771698 4787 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-84k56" Jan 27 07:53:56 crc kubenswrapper[4787]: I0127 07:53:56.778146 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"canary-serving-cert" Jan 27 07:53:56 crc kubenswrapper[4787]: I0127 07:53:56.797415 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"kube-root-ca.crt" Jan 27 07:53:56 crc kubenswrapper[4787]: I0127 07:53:56.799685 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/downloads-7954f5f757-xwx4w" Jan 27 07:53:56 crc kubenswrapper[4787]: I0127 07:53:56.806097 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-qtkc8"] Jan 27 07:53:56 crc kubenswrapper[4787]: I0127 07:53:56.814849 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-wdppr" Jan 27 07:53:56 crc kubenswrapper[4787]: I0127 07:53:56.817116 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"openshift-service-ca.crt" Jan 27 07:53:56 crc kubenswrapper[4787]: I0127 07:53:56.839099 4787 reflector.go:368] Caches populated for *v1.Secret from object-"hostpath-provisioner"/"csi-hostpath-provisioner-sa-dockercfg-qd74k" Jan 27 07:53:56 crc kubenswrapper[4787]: I0127 07:53:56.841088 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-ctv89" Jan 27 07:53:56 crc kubenswrapper[4787]: I0127 07:53:56.853525 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-tfh8x" Jan 27 07:53:56 crc kubenswrapper[4787]: I0127 07:53:56.861583 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"openshift-service-ca.crt" Jan 27 07:53:56 crc kubenswrapper[4787]: I0127 07:53:56.881126 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"kube-root-ca.crt" Jan 27 07:53:56 crc kubenswrapper[4787]: I0127 07:53:56.914207 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-qtkc8" event={"ID":"3ef9e285-88a8-499d-8fb8-e4c882336e68","Type":"ContainerStarted","Data":"76117a7f274ff7c0cffedeefc995712695a6059ef8d3e764659f3a5314b4249d"} Jan 27 07:53:56 crc kubenswrapper[4787]: I0127 07:53:56.927739 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-p29lm" event={"ID":"ccb9403f-6f1d-4bd9-a18b-b90404cff1a0","Type":"ContainerStarted","Data":"8a1cf1b1b6790d6e608c8ba019ab5acb7570ef6c8ec5120887492e4023edb838"} Jan 27 07:53:56 crc kubenswrapper[4787]: I0127 07:53:56.928235 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-g44f6" Jan 27 07:53:56 crc kubenswrapper[4787]: I0127 07:53:56.936921 4787 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-ln7rp" Jan 27 07:53:56 crc kubenswrapper[4787]: I0127 07:53:56.946062 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l85r5\" (UniqueName: \"kubernetes.io/projected/c7ed2e1c-c815-45e4-a5a1-e9006de2e8b3-kube-api-access-l85r5\") pod \"machine-config-controller-84d6567774-vl4tj\" (UID: \"c7ed2e1c-c815-45e4-a5a1-e9006de2e8b3\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-vl4tj" Jan 27 07:53:56 crc kubenswrapper[4787]: I0127 07:53:56.946225 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-tfh8x" event={"ID":"287286a8-80b2-4c95-948d-a096153d8e51","Type":"ContainerStarted","Data":"2eb2a3bdfae348a5e8e146d4add3035381139e2dcbc66f1664dad4a8791c2480"} Jan 27 07:53:56 crc kubenswrapper[4787]: I0127 07:53:56.957387 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-zkx8b" Jan 27 07:53:56 crc kubenswrapper[4787]: I0127 07:53:56.965175 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t9rhv\" (UniqueName: \"kubernetes.io/projected/3b9018b2-8e63-4c80-8c71-cc9fa7ddb853-kube-api-access-t9rhv\") pod \"openshift-controller-manager-operator-756b6f6bc6-zgw8m\" (UID: \"3b9018b2-8e63-4c80-8c71-cc9fa7ddb853\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-zgw8m" Jan 27 07:53:56 crc kubenswrapper[4787]: I0127 07:53:56.965505 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-zgw8m" Jan 27 07:53:56 crc kubenswrapper[4787]: I0127 07:53:56.976516 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-q44w4" Jan 27 07:53:56 crc kubenswrapper[4787]: I0127 07:53:56.978488 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-mnrsb"] Jan 27 07:53:56 crc kubenswrapper[4787]: I0127 07:53:56.982597 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vvsj6\" (UniqueName: \"kubernetes.io/projected/c71976d9-f7ec-4258-8bf5-08a526607fd9-kube-api-access-vvsj6\") pod \"control-plane-machine-set-operator-78cbb6b69f-wdvfw\" (UID: \"c71976d9-f7ec-4258-8bf5-08a526607fd9\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-wdvfw" Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.003393 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-vl4tj" Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.007128 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xrtv2\" (UniqueName: \"kubernetes.io/projected/52652793-dcbd-4568-a32e-b727818c61a8-kube-api-access-xrtv2\") pod \"kube-storage-version-migrator-operator-b67b599dd-d9n62\" (UID: \"52652793-dcbd-4568-a32e-b727818c61a8\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-d9n62" Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.026052 4787 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-wdvfw" Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.054911 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-dlz8t"] Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.057036 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-d9n62" Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.064281 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/2572ea5c-d57f-4bbc-b7a8-a72fcd4cced5-bound-sa-token\") pod \"ingress-operator-5b745b69d9-cqdk9\" (UID: \"2572ea5c-d57f-4bbc-b7a8-a72fcd4cced5\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-cqdk9" Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.065198 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-49hhm\" (UniqueName: \"kubernetes.io/projected/2572ea5c-d57f-4bbc-b7a8-a72fcd4cced5-kube-api-access-49hhm\") pod \"ingress-operator-5b745b69d9-cqdk9\" (UID: \"2572ea5c-d57f-4bbc-b7a8-a72fcd4cced5\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-cqdk9" Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.067248 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8qvpd\" (UniqueName: \"kubernetes.io/projected/4c622447-70d1-4b27-b09c-6fa6402f632c-kube-api-access-8qvpd\") pod \"package-server-manager-789f6589d5-4xj5z\" (UID: \"4c622447-70d1-4b27-b09c-6fa6402f632c\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-4xj5z" Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.073732 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dgqps\" (UniqueName: \"kubernetes.io/projected/f063d15e-fe05-46d3-8304-72da67594ea6-kube-api-access-dgqps\") pod \"etcd-operator-b45778765-mnzjr\" (UID: \"f063d15e-fe05-46d3-8304-72da67594ea6\") " pod="openshift-etcd-operator/etcd-operator-b45778765-mnzjr" Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.114538 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-hn2j2"] Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.124433 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rjp2w\" (UniqueName: \"kubernetes.io/projected/320d8560-54f7-4505-aa98-5beb6c45505e-kube-api-access-rjp2w\") pod \"olm-operator-6b444d44fb-vm6dc\" (UID: \"320d8560-54f7-4505-aa98-5beb6c45505e\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-vm6dc" Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.124478 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/5bd6837c-65d0-40c3-9cbc-d05d4f1e93d0-registry-tls\") pod \"image-registry-697d97f7c8-d44bj\" (UID: \"5bd6837c-65d0-40c3-9cbc-d05d4f1e93d0\") " pod="openshift-image-registry/image-registry-697d97f7c8-d44bj" Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.124498 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: 
\"kubernetes.io/secret/320d8560-54f7-4505-aa98-5beb6c45505e-profile-collector-cert\") pod \"olm-operator-6b444d44fb-vm6dc\" (UID: \"320d8560-54f7-4505-aa98-5beb6c45505e\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-vm6dc" Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.124516 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/5f4a2300-d512-490d-a876-6ad03f0c2f31-service-ca-bundle\") pod \"router-default-5444994796-jzdds\" (UID: \"5f4a2300-d512-490d-a876-6ad03f0c2f31\") " pod="openshift-ingress/router-default-5444994796-jzdds" Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.124604 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8d0e8826-188e-474f-9d5b-848886f95d1d-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-j6mnr\" (UID: \"8d0e8826-188e-474f-9d5b-848886f95d1d\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-j6mnr" Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.124621 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nl2ds\" (UniqueName: \"kubernetes.io/projected/2e88d48e-6302-4a84-90a7-69446be90e4a-kube-api-access-nl2ds\") pod \"marketplace-operator-79b997595-h5d4g\" (UID: \"2e88d48e-6302-4a84-90a7-69446be90e4a\") " pod="openshift-marketplace/marketplace-operator-79b997595-h5d4g" Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.124670 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/152af819-6586-4345-b2ef-cb8ad845a6b1-secret-volume\") pod \"collect-profiles-29491665-p955p\" (UID: \"152af819-6586-4345-b2ef-cb8ad845a6b1\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29491665-p955p" Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.124708 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/320d8560-54f7-4505-aa98-5beb6c45505e-srv-cert\") pod \"olm-operator-6b444d44fb-vm6dc\" (UID: \"320d8560-54f7-4505-aa98-5beb6c45505e\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-vm6dc" Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.124725 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/ad35acfc-9b56-41e7-87bd-b43f5c006dd5-metrics-tls\") pod \"dns-operator-744455d44c-t6xzm\" (UID: \"ad35acfc-9b56-41e7-87bd-b43f5c006dd5\") " pod="openshift-dns-operator/dns-operator-744455d44c-t6xzm" Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.124742 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/0260c856-7cbc-4fdd-887e-5b2403c05e04-node-bootstrap-token\") pod \"machine-config-server-9xlcr\" (UID: \"0260c856-7cbc-4fdd-887e-5b2403c05e04\") " pod="openshift-machine-config-operator/machine-config-server-9xlcr" Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.124760 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j7x5m\" (UniqueName: 
\"kubernetes.io/projected/66eeb178-2611-4361-962c-a399b1f243b0-kube-api-access-j7x5m\") pod \"catalog-operator-68c6474976-cgsgg\" (UID: \"66eeb178-2611-4361-962c-a399b1f243b0\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-cgsgg" Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.124810 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7727572b-c246-4b22-b3eb-14275d3d25ee-serving-cert\") pod \"service-ca-operator-777779d784-xf9dv\" (UID: \"7727572b-c246-4b22-b3eb-14275d3d25ee\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-xf9dv" Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.124828 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/5bd6837c-65d0-40c3-9cbc-d05d4f1e93d0-installation-pull-secrets\") pod \"image-registry-697d97f7c8-d44bj\" (UID: \"5bd6837c-65d0-40c3-9cbc-d05d4f1e93d0\") " pod="openshift-image-registry/image-registry-697d97f7c8-d44bj" Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.124845 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c1a7c228-ae7f-44c7-98df-78bc3949c528-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-wv4z7\" (UID: \"c1a7c228-ae7f-44c7-98df-78bc3949c528\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-wv4z7" Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.124873 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/770c6bc0-a6c4-42b9-b21e-d1c36ed7b8bf-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-8b5jc\" (UID: \"770c6bc0-a6c4-42b9-b21e-d1c36ed7b8bf\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-8b5jc" Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.124891 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c1a7c228-ae7f-44c7-98df-78bc3949c528-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-wv4z7\" (UID: \"c1a7c228-ae7f-44c7-98df-78bc3949c528\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-wv4z7" Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.124907 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/cd10b89a-565a-4ae6-b7dd-4bac183ece8c-images\") pod \"machine-config-operator-74547568cd-m7czl\" (UID: \"cd10b89a-565a-4ae6-b7dd-4bac183ece8c\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-m7czl" Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.124932 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h8hpd\" (UniqueName: \"kubernetes.io/projected/5f4a2300-d512-490d-a876-6ad03f0c2f31-kube-api-access-h8hpd\") pod \"router-default-5444994796-jzdds\" (UID: \"5f4a2300-d512-490d-a876-6ad03f0c2f31\") " pod="openshift-ingress/router-default-5444994796-jzdds" Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.124960 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for 
volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/5f4a2300-d512-490d-a876-6ad03f0c2f31-default-certificate\") pod \"router-default-5444994796-jzdds\" (UID: \"5f4a2300-d512-490d-a876-6ad03f0c2f31\") " pod="openshift-ingress/router-default-5444994796-jzdds" Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.124979 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2wqwl\" (UniqueName: \"kubernetes.io/projected/be571415-6ab0-4b8f-a971-3225465af110-kube-api-access-2wqwl\") pod \"migrator-59844c95c7-h4cr4\" (UID: \"be571415-6ab0-4b8f-a971-3225465af110\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-h4cr4" Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.125011 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/5f4a2300-d512-490d-a876-6ad03f0c2f31-stats-auth\") pod \"router-default-5444994796-jzdds\" (UID: \"5f4a2300-d512-490d-a876-6ad03f0c2f31\") " pod="openshift-ingress/router-default-5444994796-jzdds" Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.125029 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/5bd6837c-65d0-40c3-9cbc-d05d4f1e93d0-registry-certificates\") pod \"image-registry-697d97f7c8-d44bj\" (UID: \"5bd6837c-65d0-40c3-9cbc-d05d4f1e93d0\") " pod="openshift-image-registry/image-registry-697d97f7c8-d44bj" Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.125049 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/0260c856-7cbc-4fdd-887e-5b2403c05e04-certs\") pod \"machine-config-server-9xlcr\" (UID: \"0260c856-7cbc-4fdd-887e-5b2403c05e04\") " pod="openshift-machine-config-operator/machine-config-server-9xlcr" Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.125065 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5f4a2300-d512-490d-a876-6ad03f0c2f31-metrics-certs\") pod \"router-default-5444994796-jzdds\" (UID: \"5f4a2300-d512-490d-a876-6ad03f0c2f31\") " pod="openshift-ingress/router-default-5444994796-jzdds" Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.125084 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/5bd6837c-65d0-40c3-9cbc-d05d4f1e93d0-ca-trust-extracted\") pod \"image-registry-697d97f7c8-d44bj\" (UID: \"5bd6837c-65d0-40c3-9cbc-d05d4f1e93d0\") " pod="openshift-image-registry/image-registry-697d97f7c8-d44bj" Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.125102 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/770c6bc0-a6c4-42b9-b21e-d1c36ed7b8bf-config\") pod \"kube-apiserver-operator-766d6c64bb-8b5jc\" (UID: \"770c6bc0-a6c4-42b9-b21e-d1c36ed7b8bf\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-8b5jc" Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.125176 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/8d0e8826-188e-474f-9d5b-848886f95d1d-kube-api-access\") 
pod \"kube-controller-manager-operator-78b949d7b-j6mnr\" (UID: \"8d0e8826-188e-474f-9d5b-848886f95d1d\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-j6mnr" Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.125221 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/2e88d48e-6302-4a84-90a7-69446be90e4a-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-h5d4g\" (UID: \"2e88d48e-6302-4a84-90a7-69446be90e4a\") " pod="openshift-marketplace/marketplace-operator-79b997595-h5d4g" Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.125247 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/5bd6837c-65d0-40c3-9cbc-d05d4f1e93d0-bound-sa-token\") pod \"image-registry-697d97f7c8-d44bj\" (UID: \"5bd6837c-65d0-40c3-9cbc-d05d4f1e93d0\") " pod="openshift-image-registry/image-registry-697d97f7c8-d44bj" Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.125270 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/c1a7c228-ae7f-44c7-98df-78bc3949c528-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-wv4z7\" (UID: \"c1a7c228-ae7f-44c7-98df-78bc3949c528\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-wv4z7" Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.125314 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8d0e8826-188e-474f-9d5b-848886f95d1d-config\") pod \"kube-controller-manager-operator-78b949d7b-j6mnr\" (UID: \"8d0e8826-188e-474f-9d5b-848886f95d1d\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-j6mnr" Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.125366 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/cd10b89a-565a-4ae6-b7dd-4bac183ece8c-auth-proxy-config\") pod \"machine-config-operator-74547568cd-m7czl\" (UID: \"cd10b89a-565a-4ae6-b7dd-4bac183ece8c\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-m7czl" Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.125388 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/770c6bc0-a6c4-42b9-b21e-d1c36ed7b8bf-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-8b5jc\" (UID: \"770c6bc0-a6c4-42b9-b21e-d1c36ed7b8bf\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-8b5jc" Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.125450 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fw5zw\" (UniqueName: \"kubernetes.io/projected/7727572b-c246-4b22-b3eb-14275d3d25ee-kube-api-access-fw5zw\") pod \"service-ca-operator-777779d784-xf9dv\" (UID: \"7727572b-c246-4b22-b3eb-14275d3d25ee\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-xf9dv" Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.125476 4787 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sd9pb\" (UniqueName: \"kubernetes.io/projected/5bd6837c-65d0-40c3-9cbc-d05d4f1e93d0-kube-api-access-sd9pb\") pod \"image-registry-697d97f7c8-d44bj\" (UID: \"5bd6837c-65d0-40c3-9cbc-d05d4f1e93d0\") " pod="openshift-image-registry/image-registry-697d97f7c8-d44bj" Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.125496 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/66eeb178-2611-4361-962c-a399b1f243b0-srv-cert\") pod \"catalog-operator-68c6474976-cgsgg\" (UID: \"66eeb178-2611-4361-962c-a399b1f243b0\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-cgsgg" Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.125514 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/66eeb178-2611-4361-962c-a399b1f243b0-profile-collector-cert\") pod \"catalog-operator-68c6474976-cgsgg\" (UID: \"66eeb178-2611-4361-962c-a399b1f243b0\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-cgsgg" Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.125537 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mjxfr\" (UniqueName: \"kubernetes.io/projected/ad35acfc-9b56-41e7-87bd-b43f5c006dd5-kube-api-access-mjxfr\") pod \"dns-operator-744455d44c-t6xzm\" (UID: \"ad35acfc-9b56-41e7-87bd-b43f5c006dd5\") " pod="openshift-dns-operator/dns-operator-744455d44c-t6xzm" Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.125610 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-d44bj\" (UID: \"5bd6837c-65d0-40c3-9cbc-d05d4f1e93d0\") " pod="openshift-image-registry/image-registry-697d97f7c8-d44bj" Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.125635 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9r6l9\" (UniqueName: \"kubernetes.io/projected/152af819-6586-4345-b2ef-cb8ad845a6b1-kube-api-access-9r6l9\") pod \"collect-profiles-29491665-p955p\" (UID: \"152af819-6586-4345-b2ef-cb8ad845a6b1\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29491665-p955p" Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.125682 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/2e88d48e-6302-4a84-90a7-69446be90e4a-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-h5d4g\" (UID: \"2e88d48e-6302-4a84-90a7-69446be90e4a\") " pod="openshift-marketplace/marketplace-operator-79b997595-h5d4g" Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.125851 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vdfkh\" (UniqueName: \"kubernetes.io/projected/cd10b89a-565a-4ae6-b7dd-4bac183ece8c-kube-api-access-vdfkh\") pod \"machine-config-operator-74547568cd-m7czl\" (UID: \"cd10b89a-565a-4ae6-b7dd-4bac183ece8c\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-m7czl" Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 
07:53:57.125884 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9sczk\" (UniqueName: \"kubernetes.io/projected/0260c856-7cbc-4fdd-887e-5b2403c05e04-kube-api-access-9sczk\") pod \"machine-config-server-9xlcr\" (UID: \"0260c856-7cbc-4fdd-887e-5b2403c05e04\") " pod="openshift-machine-config-operator/machine-config-server-9xlcr" Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.125902 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/cd10b89a-565a-4ae6-b7dd-4bac183ece8c-proxy-tls\") pod \"machine-config-operator-74547568cd-m7czl\" (UID: \"cd10b89a-565a-4ae6-b7dd-4bac183ece8c\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-m7czl" Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.125995 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/152af819-6586-4345-b2ef-cb8ad845a6b1-config-volume\") pod \"collect-profiles-29491665-p955p\" (UID: \"152af819-6586-4345-b2ef-cb8ad845a6b1\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29491665-p955p" Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.126034 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7727572b-c246-4b22-b3eb-14275d3d25ee-config\") pod \"service-ca-operator-777779d784-xf9dv\" (UID: \"7727572b-c246-4b22-b3eb-14275d3d25ee\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-xf9dv" Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.126071 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/5bd6837c-65d0-40c3-9cbc-d05d4f1e93d0-trusted-ca\") pod \"image-registry-697d97f7c8-d44bj\" (UID: \"5bd6837c-65d0-40c3-9cbc-d05d4f1e93d0\") " pod="openshift-image-registry/image-registry-697d97f7c8-d44bj" Jan 27 07:53:57 crc kubenswrapper[4787]: E0127 07:53:57.127184 4787 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-27 07:53:57.627170691 +0000 UTC m=+143.279526183 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-d44bj" (UID: "5bd6837c-65d0-40c3-9cbc-d05d4f1e93d0") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.229276 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.229522 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7727572b-c246-4b22-b3eb-14275d3d25ee-serving-cert\") pod \"service-ca-operator-777779d784-xf9dv\" (UID: \"7727572b-c246-4b22-b3eb-14275d3d25ee\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-xf9dv" Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.229553 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/7307207b-fd57-4efc-807b-7e0664873f73-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-tx6fd\" (UID: \"7307207b-fd57-4efc-807b-7e0664873f73\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-tx6fd" Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.229606 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/5bd6837c-65d0-40c3-9cbc-d05d4f1e93d0-installation-pull-secrets\") pod \"image-registry-697d97f7c8-d44bj\" (UID: \"5bd6837c-65d0-40c3-9cbc-d05d4f1e93d0\") " pod="openshift-image-registry/image-registry-697d97f7c8-d44bj" Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.229626 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c1a7c228-ae7f-44c7-98df-78bc3949c528-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-wv4z7\" (UID: \"c1a7c228-ae7f-44c7-98df-78bc3949c528\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-wv4z7" Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.229656 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c1a7c228-ae7f-44c7-98df-78bc3949c528-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-wv4z7\" (UID: \"c1a7c228-ae7f-44c7-98df-78bc3949c528\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-wv4z7" Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.229674 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/cd10b89a-565a-4ae6-b7dd-4bac183ece8c-images\") pod \"machine-config-operator-74547568cd-m7czl\" (UID: \"cd10b89a-565a-4ae6-b7dd-4bac183ece8c\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-m7czl" Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.229691 4787 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/770c6bc0-a6c4-42b9-b21e-d1c36ed7b8bf-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-8b5jc\" (UID: \"770c6bc0-a6c4-42b9-b21e-d1c36ed7b8bf\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-8b5jc" Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.229722 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h8hpd\" (UniqueName: \"kubernetes.io/projected/5f4a2300-d512-490d-a876-6ad03f0c2f31-kube-api-access-h8hpd\") pod \"router-default-5444994796-jzdds\" (UID: \"5f4a2300-d512-490d-a876-6ad03f0c2f31\") " pod="openshift-ingress/router-default-5444994796-jzdds" Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.229751 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/5f4a2300-d512-490d-a876-6ad03f0c2f31-default-certificate\") pod \"router-default-5444994796-jzdds\" (UID: \"5f4a2300-d512-490d-a876-6ad03f0c2f31\") " pod="openshift-ingress/router-default-5444994796-jzdds" Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.229770 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bd7w4\" (UniqueName: \"kubernetes.io/projected/f72f30c1-aeab-47d1-b353-5ea33af8eb6b-kube-api-access-bd7w4\") pod \"csi-hostpathplugin-mwnxd\" (UID: \"f72f30c1-aeab-47d1-b353-5ea33af8eb6b\") " pod="hostpath-provisioner/csi-hostpathplugin-mwnxd" Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.229791 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/0c6406d2-db7c-458f-9c56-a640eee1f327-signing-cabundle\") pod \"service-ca-9c57cc56f-crvsg\" (UID: \"0c6406d2-db7c-458f-9c56-a640eee1f327\") " pod="openshift-service-ca/service-ca-9c57cc56f-crvsg" Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.229822 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/f72f30c1-aeab-47d1-b353-5ea33af8eb6b-mountpoint-dir\") pod \"csi-hostpathplugin-mwnxd\" (UID: \"f72f30c1-aeab-47d1-b353-5ea33af8eb6b\") " pod="hostpath-provisioner/csi-hostpathplugin-mwnxd" Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.229855 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2wqwl\" (UniqueName: \"kubernetes.io/projected/be571415-6ab0-4b8f-a971-3225465af110-kube-api-access-2wqwl\") pod \"migrator-59844c95c7-h4cr4\" (UID: \"be571415-6ab0-4b8f-a971-3225465af110\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-h4cr4" Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.229884 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/5f4a2300-d512-490d-a876-6ad03f0c2f31-stats-auth\") pod \"router-default-5444994796-jzdds\" (UID: \"5f4a2300-d512-490d-a876-6ad03f0c2f31\") " pod="openshift-ingress/router-default-5444994796-jzdds" Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.229905 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/0260c856-7cbc-4fdd-887e-5b2403c05e04-certs\") pod \"machine-config-server-9xlcr\" (UID: 
\"0260c856-7cbc-4fdd-887e-5b2403c05e04\") " pod="openshift-machine-config-operator/machine-config-server-9xlcr" Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.229923 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5f4a2300-d512-490d-a876-6ad03f0c2f31-metrics-certs\") pod \"router-default-5444994796-jzdds\" (UID: \"5f4a2300-d512-490d-a876-6ad03f0c2f31\") " pod="openshift-ingress/router-default-5444994796-jzdds" Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.229959 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/5bd6837c-65d0-40c3-9cbc-d05d4f1e93d0-registry-certificates\") pod \"image-registry-697d97f7c8-d44bj\" (UID: \"5bd6837c-65d0-40c3-9cbc-d05d4f1e93d0\") " pod="openshift-image-registry/image-registry-697d97f7c8-d44bj" Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.229978 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/f72f30c1-aeab-47d1-b353-5ea33af8eb6b-socket-dir\") pod \"csi-hostpathplugin-mwnxd\" (UID: \"f72f30c1-aeab-47d1-b353-5ea33af8eb6b\") " pod="hostpath-provisioner/csi-hostpathplugin-mwnxd" Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.230018 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/5bd6837c-65d0-40c3-9cbc-d05d4f1e93d0-ca-trust-extracted\") pod \"image-registry-697d97f7c8-d44bj\" (UID: \"5bd6837c-65d0-40c3-9cbc-d05d4f1e93d0\") " pod="openshift-image-registry/image-registry-697d97f7c8-d44bj" Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.230035 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/770c6bc0-a6c4-42b9-b21e-d1c36ed7b8bf-config\") pod \"kube-apiserver-operator-766d6c64bb-8b5jc\" (UID: \"770c6bc0-a6c4-42b9-b21e-d1c36ed7b8bf\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-8b5jc" Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.230053 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/8aecac31-d20f-4725-b563-b66c9288769d-metrics-tls\") pod \"dns-default-rkzlx\" (UID: \"8aecac31-d20f-4725-b563-b66c9288769d\") " pod="openshift-dns/dns-default-rkzlx" Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.230116 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wrn4l\" (UniqueName: \"kubernetes.io/projected/7307207b-fd57-4efc-807b-7e0664873f73-kube-api-access-wrn4l\") pod \"multus-admission-controller-857f4d67dd-tx6fd\" (UID: \"7307207b-fd57-4efc-807b-7e0664873f73\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-tx6fd" Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.230137 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/8d0e8826-188e-474f-9d5b-848886f95d1d-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-j6mnr\" (UID: \"8d0e8826-188e-474f-9d5b-848886f95d1d\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-j6mnr" Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.230158 4787 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/2e88d48e-6302-4a84-90a7-69446be90e4a-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-h5d4g\" (UID: \"2e88d48e-6302-4a84-90a7-69446be90e4a\") " pod="openshift-marketplace/marketplace-operator-79b997595-h5d4g" Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.230191 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/5bd6837c-65d0-40c3-9cbc-d05d4f1e93d0-bound-sa-token\") pod \"image-registry-697d97f7c8-d44bj\" (UID: \"5bd6837c-65d0-40c3-9cbc-d05d4f1e93d0\") " pod="openshift-image-registry/image-registry-697d97f7c8-d44bj" Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.230211 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rww65\" (UniqueName: \"kubernetes.io/projected/082e51f2-7ac3-4111-a40d-eb8498db9153-kube-api-access-rww65\") pod \"packageserver-d55dfcdfc-kqzdm\" (UID: \"082e51f2-7ac3-4111-a40d-eb8498db9153\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-kqzdm" Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.230242 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/c1a7c228-ae7f-44c7-98df-78bc3949c528-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-wv4z7\" (UID: \"c1a7c228-ae7f-44c7-98df-78bc3949c528\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-wv4z7" Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.230262 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8d0e8826-188e-474f-9d5b-848886f95d1d-config\") pod \"kube-controller-manager-operator-78b949d7b-j6mnr\" (UID: \"8d0e8826-188e-474f-9d5b-848886f95d1d\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-j6mnr" Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.230279 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/0c6406d2-db7c-458f-9c56-a640eee1f327-signing-key\") pod \"service-ca-9c57cc56f-crvsg\" (UID: \"0c6406d2-db7c-458f-9c56-a640eee1f327\") " pod="openshift-service-ca/service-ca-9c57cc56f-crvsg" Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.230305 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9gq6w\" (UniqueName: \"kubernetes.io/projected/0c6406d2-db7c-458f-9c56-a640eee1f327-kube-api-access-9gq6w\") pod \"service-ca-9c57cc56f-crvsg\" (UID: \"0c6406d2-db7c-458f-9c56-a640eee1f327\") " pod="openshift-service-ca/service-ca-9c57cc56f-crvsg" Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.230323 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/770c6bc0-a6c4-42b9-b21e-d1c36ed7b8bf-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-8b5jc\" (UID: \"770c6bc0-a6c4-42b9-b21e-d1c36ed7b8bf\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-8b5jc" Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.230351 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/cd10b89a-565a-4ae6-b7dd-4bac183ece8c-auth-proxy-config\") pod \"machine-config-operator-74547568cd-m7czl\" (UID: \"cd10b89a-565a-4ae6-b7dd-4bac183ece8c\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-m7czl" Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.230371 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/9ce2a24d-e7e3-4c93-a46a-e914e248d6ae-cert\") pod \"ingress-canary-znxn2\" (UID: \"9ce2a24d-e7e3-4c93-a46a-e914e248d6ae\") " pod="openshift-ingress-canary/ingress-canary-znxn2" Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.230420 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fw5zw\" (UniqueName: \"kubernetes.io/projected/7727572b-c246-4b22-b3eb-14275d3d25ee-kube-api-access-fw5zw\") pod \"service-ca-operator-777779d784-xf9dv\" (UID: \"7727572b-c246-4b22-b3eb-14275d3d25ee\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-xf9dv" Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.230444 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tczr8\" (UniqueName: \"kubernetes.io/projected/8aecac31-d20f-4725-b563-b66c9288769d-kube-api-access-tczr8\") pod \"dns-default-rkzlx\" (UID: \"8aecac31-d20f-4725-b563-b66c9288769d\") " pod="openshift-dns/dns-default-rkzlx" Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.230463 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sd9pb\" (UniqueName: \"kubernetes.io/projected/5bd6837c-65d0-40c3-9cbc-d05d4f1e93d0-kube-api-access-sd9pb\") pod \"image-registry-697d97f7c8-d44bj\" (UID: \"5bd6837c-65d0-40c3-9cbc-d05d4f1e93d0\") " pod="openshift-image-registry/image-registry-697d97f7c8-d44bj" Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.230499 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/66eeb178-2611-4361-962c-a399b1f243b0-srv-cert\") pod \"catalog-operator-68c6474976-cgsgg\" (UID: \"66eeb178-2611-4361-962c-a399b1f243b0\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-cgsgg" Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.230519 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/66eeb178-2611-4361-962c-a399b1f243b0-profile-collector-cert\") pod \"catalog-operator-68c6474976-cgsgg\" (UID: \"66eeb178-2611-4361-962c-a399b1f243b0\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-cgsgg" Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.230538 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mjxfr\" (UniqueName: \"kubernetes.io/projected/ad35acfc-9b56-41e7-87bd-b43f5c006dd5-kube-api-access-mjxfr\") pod \"dns-operator-744455d44c-t6xzm\" (UID: \"ad35acfc-9b56-41e7-87bd-b43f5c006dd5\") " pod="openshift-dns-operator/dns-operator-744455d44c-t6xzm" Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.230588 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9r6l9\" (UniqueName: \"kubernetes.io/projected/152af819-6586-4345-b2ef-cb8ad845a6b1-kube-api-access-9r6l9\") pod \"collect-profiles-29491665-p955p\" (UID: 
\"152af819-6586-4345-b2ef-cb8ad845a6b1\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29491665-p955p" Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.230606 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/2e88d48e-6302-4a84-90a7-69446be90e4a-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-h5d4g\" (UID: \"2e88d48e-6302-4a84-90a7-69446be90e4a\") " pod="openshift-marketplace/marketplace-operator-79b997595-h5d4g" Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.230666 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/082e51f2-7ac3-4111-a40d-eb8498db9153-tmpfs\") pod \"packageserver-d55dfcdfc-kqzdm\" (UID: \"082e51f2-7ac3-4111-a40d-eb8498db9153\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-kqzdm" Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.230711 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/f72f30c1-aeab-47d1-b353-5ea33af8eb6b-plugins-dir\") pod \"csi-hostpathplugin-mwnxd\" (UID: \"f72f30c1-aeab-47d1-b353-5ea33af8eb6b\") " pod="hostpath-provisioner/csi-hostpathplugin-mwnxd" Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.230764 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/8aecac31-d20f-4725-b563-b66c9288769d-config-volume\") pod \"dns-default-rkzlx\" (UID: \"8aecac31-d20f-4725-b563-b66c9288769d\") " pod="openshift-dns/dns-default-rkzlx" Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.230806 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9sczk\" (UniqueName: \"kubernetes.io/projected/0260c856-7cbc-4fdd-887e-5b2403c05e04-kube-api-access-9sczk\") pod \"machine-config-server-9xlcr\" (UID: \"0260c856-7cbc-4fdd-887e-5b2403c05e04\") " pod="openshift-machine-config-operator/machine-config-server-9xlcr" Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.230826 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/cd10b89a-565a-4ae6-b7dd-4bac183ece8c-proxy-tls\") pod \"machine-config-operator-74547568cd-m7czl\" (UID: \"cd10b89a-565a-4ae6-b7dd-4bac183ece8c\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-m7czl" Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.230846 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vdfkh\" (UniqueName: \"kubernetes.io/projected/cd10b89a-565a-4ae6-b7dd-4bac183ece8c-kube-api-access-vdfkh\") pod \"machine-config-operator-74547568cd-m7czl\" (UID: \"cd10b89a-565a-4ae6-b7dd-4bac183ece8c\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-m7czl" Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.230882 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/152af819-6586-4345-b2ef-cb8ad845a6b1-config-volume\") pod \"collect-profiles-29491665-p955p\" (UID: \"152af819-6586-4345-b2ef-cb8ad845a6b1\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29491665-p955p" Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.230914 4787 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/5bd6837c-65d0-40c3-9cbc-d05d4f1e93d0-trusted-ca\") pod \"image-registry-697d97f7c8-d44bj\" (UID: \"5bd6837c-65d0-40c3-9cbc-d05d4f1e93d0\") " pod="openshift-image-registry/image-registry-697d97f7c8-d44bj" Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.230933 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7727572b-c246-4b22-b3eb-14275d3d25ee-config\") pod \"service-ca-operator-777779d784-xf9dv\" (UID: \"7727572b-c246-4b22-b3eb-14275d3d25ee\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-xf9dv" Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.230963 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rjp2w\" (UniqueName: \"kubernetes.io/projected/320d8560-54f7-4505-aa98-5beb6c45505e-kube-api-access-rjp2w\") pod \"olm-operator-6b444d44fb-vm6dc\" (UID: \"320d8560-54f7-4505-aa98-5beb6c45505e\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-vm6dc" Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.231001 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/5bd6837c-65d0-40c3-9cbc-d05d4f1e93d0-registry-tls\") pod \"image-registry-697d97f7c8-d44bj\" (UID: \"5bd6837c-65d0-40c3-9cbc-d05d4f1e93d0\") " pod="openshift-image-registry/image-registry-697d97f7c8-d44bj" Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.231020 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/5f4a2300-d512-490d-a876-6ad03f0c2f31-service-ca-bundle\") pod \"router-default-5444994796-jzdds\" (UID: \"5f4a2300-d512-490d-a876-6ad03f0c2f31\") " pod="openshift-ingress/router-default-5444994796-jzdds" Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.231038 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/320d8560-54f7-4505-aa98-5beb6c45505e-profile-collector-cert\") pod \"olm-operator-6b444d44fb-vm6dc\" (UID: \"320d8560-54f7-4505-aa98-5beb6c45505e\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-vm6dc" Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.231055 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/082e51f2-7ac3-4111-a40d-eb8498db9153-apiservice-cert\") pod \"packageserver-d55dfcdfc-kqzdm\" (UID: \"082e51f2-7ac3-4111-a40d-eb8498db9153\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-kqzdm" Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.231076 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nl2ds\" (UniqueName: \"kubernetes.io/projected/2e88d48e-6302-4a84-90a7-69446be90e4a-kube-api-access-nl2ds\") pod \"marketplace-operator-79b997595-h5d4g\" (UID: \"2e88d48e-6302-4a84-90a7-69446be90e4a\") " pod="openshift-marketplace/marketplace-operator-79b997595-h5d4g" Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.231095 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8d0e8826-188e-474f-9d5b-848886f95d1d-serving-cert\") pod 
\"kube-controller-manager-operator-78b949d7b-j6mnr\" (UID: \"8d0e8826-188e-474f-9d5b-848886f95d1d\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-j6mnr" Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.231117 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x2rb9\" (UniqueName: \"kubernetes.io/projected/9ce2a24d-e7e3-4c93-a46a-e914e248d6ae-kube-api-access-x2rb9\") pod \"ingress-canary-znxn2\" (UID: \"9ce2a24d-e7e3-4c93-a46a-e914e248d6ae\") " pod="openshift-ingress-canary/ingress-canary-znxn2" Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.231144 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/f72f30c1-aeab-47d1-b353-5ea33af8eb6b-registration-dir\") pod \"csi-hostpathplugin-mwnxd\" (UID: \"f72f30c1-aeab-47d1-b353-5ea33af8eb6b\") " pod="hostpath-provisioner/csi-hostpathplugin-mwnxd" Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.231161 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/082e51f2-7ac3-4111-a40d-eb8498db9153-webhook-cert\") pod \"packageserver-d55dfcdfc-kqzdm\" (UID: \"082e51f2-7ac3-4111-a40d-eb8498db9153\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-kqzdm" Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.231181 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/152af819-6586-4345-b2ef-cb8ad845a6b1-secret-volume\") pod \"collect-profiles-29491665-p955p\" (UID: \"152af819-6586-4345-b2ef-cb8ad845a6b1\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29491665-p955p" Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.231200 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/320d8560-54f7-4505-aa98-5beb6c45505e-srv-cert\") pod \"olm-operator-6b444d44fb-vm6dc\" (UID: \"320d8560-54f7-4505-aa98-5beb6c45505e\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-vm6dc" Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.231220 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/ad35acfc-9b56-41e7-87bd-b43f5c006dd5-metrics-tls\") pod \"dns-operator-744455d44c-t6xzm\" (UID: \"ad35acfc-9b56-41e7-87bd-b43f5c006dd5\") " pod="openshift-dns-operator/dns-operator-744455d44c-t6xzm" Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.231237 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/f72f30c1-aeab-47d1-b353-5ea33af8eb6b-csi-data-dir\") pod \"csi-hostpathplugin-mwnxd\" (UID: \"f72f30c1-aeab-47d1-b353-5ea33af8eb6b\") " pod="hostpath-provisioner/csi-hostpathplugin-mwnxd" Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.231259 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/0260c856-7cbc-4fdd-887e-5b2403c05e04-node-bootstrap-token\") pod \"machine-config-server-9xlcr\" (UID: \"0260c856-7cbc-4fdd-887e-5b2403c05e04\") " pod="openshift-machine-config-operator/machine-config-server-9xlcr" Jan 27 07:53:57 crc kubenswrapper[4787]: 
I0127 07:53:57.231287 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j7x5m\" (UniqueName: \"kubernetes.io/projected/66eeb178-2611-4361-962c-a399b1f243b0-kube-api-access-j7x5m\") pod \"catalog-operator-68c6474976-cgsgg\" (UID: \"66eeb178-2611-4361-962c-a399b1f243b0\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-cgsgg" Jan 27 07:53:57 crc kubenswrapper[4787]: E0127 07:53:57.232713 4787 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-27 07:53:57.732696737 +0000 UTC m=+143.385052229 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.241543 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/152af819-6586-4345-b2ef-cb8ad845a6b1-config-volume\") pod \"collect-profiles-29491665-p955p\" (UID: \"152af819-6586-4345-b2ef-cb8ad845a6b1\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29491665-p955p" Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.242589 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/5bd6837c-65d0-40c3-9cbc-d05d4f1e93d0-ca-trust-extracted\") pod \"image-registry-697d97f7c8-d44bj\" (UID: \"5bd6837c-65d0-40c3-9cbc-d05d4f1e93d0\") " pod="openshift-image-registry/image-registry-697d97f7c8-d44bj" Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.243910 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/5bd6837c-65d0-40c3-9cbc-d05d4f1e93d0-trusted-ca\") pod \"image-registry-697d97f7c8-d44bj\" (UID: \"5bd6837c-65d0-40c3-9cbc-d05d4f1e93d0\") " pod="openshift-image-registry/image-registry-697d97f7c8-d44bj" Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.244887 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/770c6bc0-a6c4-42b9-b21e-d1c36ed7b8bf-config\") pod \"kube-apiserver-operator-766d6c64bb-8b5jc\" (UID: \"770c6bc0-a6c4-42b9-b21e-d1c36ed7b8bf\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-8b5jc" Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.245212 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c1a7c228-ae7f-44c7-98df-78bc3949c528-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-wv4z7\" (UID: \"c1a7c228-ae7f-44c7-98df-78bc3949c528\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-wv4z7" Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.246337 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: 
\"kubernetes.io/configmap/5f4a2300-d512-490d-a876-6ad03f0c2f31-service-ca-bundle\") pod \"router-default-5444994796-jzdds\" (UID: \"5f4a2300-d512-490d-a876-6ad03f0c2f31\") " pod="openshift-ingress/router-default-5444994796-jzdds" Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.249138 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/5bd6837c-65d0-40c3-9cbc-d05d4f1e93d0-registry-tls\") pod \"image-registry-697d97f7c8-d44bj\" (UID: \"5bd6837c-65d0-40c3-9cbc-d05d4f1e93d0\") " pod="openshift-image-registry/image-registry-697d97f7c8-d44bj" Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.250434 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/cd10b89a-565a-4ae6-b7dd-4bac183ece8c-auth-proxy-config\") pod \"machine-config-operator-74547568cd-m7czl\" (UID: \"cd10b89a-565a-4ae6-b7dd-4bac183ece8c\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-m7czl" Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.250539 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8d0e8826-188e-474f-9d5b-848886f95d1d-config\") pod \"kube-controller-manager-operator-78b949d7b-j6mnr\" (UID: \"8d0e8826-188e-474f-9d5b-848886f95d1d\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-j6mnr" Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.250699 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/5bd6837c-65d0-40c3-9cbc-d05d4f1e93d0-registry-certificates\") pod \"image-registry-697d97f7c8-d44bj\" (UID: \"5bd6837c-65d0-40c3-9cbc-d05d4f1e93d0\") " pod="openshift-image-registry/image-registry-697d97f7c8-d44bj" Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.251590 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/cd10b89a-565a-4ae6-b7dd-4bac183ece8c-images\") pod \"machine-config-operator-74547568cd-m7czl\" (UID: \"cd10b89a-565a-4ae6-b7dd-4bac183ece8c\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-m7czl" Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.253043 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/5bd6837c-65d0-40c3-9cbc-d05d4f1e93d0-installation-pull-secrets\") pod \"image-registry-697d97f7c8-d44bj\" (UID: \"5bd6837c-65d0-40c3-9cbc-d05d4f1e93d0\") " pod="openshift-image-registry/image-registry-697d97f7c8-d44bj" Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.254748 4787 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-mnzjr" Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.256151 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7727572b-c246-4b22-b3eb-14275d3d25ee-config\") pod \"service-ca-operator-777779d784-xf9dv\" (UID: \"7727572b-c246-4b22-b3eb-14275d3d25ee\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-xf9dv" Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.256725 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7727572b-c246-4b22-b3eb-14275d3d25ee-serving-cert\") pod \"service-ca-operator-777779d784-xf9dv\" (UID: \"7727572b-c246-4b22-b3eb-14275d3d25ee\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-xf9dv" Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.258917 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/2e88d48e-6302-4a84-90a7-69446be90e4a-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-h5d4g\" (UID: \"2e88d48e-6302-4a84-90a7-69446be90e4a\") " pod="openshift-marketplace/marketplace-operator-79b997595-h5d4g" Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.266420 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/2e88d48e-6302-4a84-90a7-69446be90e4a-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-h5d4g\" (UID: \"2e88d48e-6302-4a84-90a7-69446be90e4a\") " pod="openshift-marketplace/marketplace-operator-79b997595-h5d4g" Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.271038 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/66eeb178-2611-4361-962c-a399b1f243b0-srv-cert\") pod \"catalog-operator-68c6474976-cgsgg\" (UID: \"66eeb178-2611-4361-962c-a399b1f243b0\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-cgsgg" Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.274158 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/320d8560-54f7-4505-aa98-5beb6c45505e-profile-collector-cert\") pod \"olm-operator-6b444d44fb-vm6dc\" (UID: \"320d8560-54f7-4505-aa98-5beb6c45505e\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-vm6dc" Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.277217 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8d0e8826-188e-474f-9d5b-848886f95d1d-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-j6mnr\" (UID: \"8d0e8826-188e-474f-9d5b-848886f95d1d\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-j6mnr" Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.284218 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/66eeb178-2611-4361-962c-a399b1f243b0-profile-collector-cert\") pod \"catalog-operator-68c6474976-cgsgg\" (UID: \"66eeb178-2611-4361-962c-a399b1f243b0\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-cgsgg" Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.284411 4787 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-nl2ds\" (UniqueName: \"kubernetes.io/projected/2e88d48e-6302-4a84-90a7-69446be90e4a-kube-api-access-nl2ds\") pod \"marketplace-operator-79b997595-h5d4g\" (UID: \"2e88d48e-6302-4a84-90a7-69446be90e4a\") " pod="openshift-marketplace/marketplace-operator-79b997595-h5d4g" Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.285172 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"certs\" (UniqueName: \"kubernetes.io/secret/0260c856-7cbc-4fdd-887e-5b2403c05e04-certs\") pod \"machine-config-server-9xlcr\" (UID: \"0260c856-7cbc-4fdd-887e-5b2403c05e04\") " pod="openshift-machine-config-operator/machine-config-server-9xlcr" Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.285456 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/5f4a2300-d512-490d-a876-6ad03f0c2f31-stats-auth\") pod \"router-default-5444994796-jzdds\" (UID: \"5f4a2300-d512-490d-a876-6ad03f0c2f31\") " pod="openshift-ingress/router-default-5444994796-jzdds" Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.286072 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/cd10b89a-565a-4ae6-b7dd-4bac183ece8c-proxy-tls\") pod \"machine-config-operator-74547568cd-m7czl\" (UID: \"cd10b89a-565a-4ae6-b7dd-4bac183ece8c\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-m7czl" Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.288787 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/770c6bc0-a6c4-42b9-b21e-d1c36ed7b8bf-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-8b5jc\" (UID: \"770c6bc0-a6c4-42b9-b21e-d1c36ed7b8bf\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-8b5jc" Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.289973 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-qptnb"] Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.292286 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/0260c856-7cbc-4fdd-887e-5b2403c05e04-node-bootstrap-token\") pod \"machine-config-server-9xlcr\" (UID: \"0260c856-7cbc-4fdd-887e-5b2403c05e04\") " pod="openshift-machine-config-operator/machine-config-server-9xlcr" Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.293223 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/ad35acfc-9b56-41e7-87bd-b43f5c006dd5-metrics-tls\") pod \"dns-operator-744455d44c-t6xzm\" (UID: \"ad35acfc-9b56-41e7-87bd-b43f5c006dd5\") " pod="openshift-dns-operator/dns-operator-744455d44c-t6xzm" Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.293770 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/152af819-6586-4345-b2ef-cb8ad845a6b1-secret-volume\") pod \"collect-profiles-29491665-p955p\" (UID: \"152af819-6586-4345-b2ef-cb8ad845a6b1\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29491665-p955p" Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.293914 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/320d8560-54f7-4505-aa98-5beb6c45505e-srv-cert\") pod 
\"olm-operator-6b444d44fb-vm6dc\" (UID: \"320d8560-54f7-4505-aa98-5beb6c45505e\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-vm6dc" Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.295834 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-cqdk9" Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.296528 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c1a7c228-ae7f-44c7-98df-78bc3949c528-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-wv4z7\" (UID: \"c1a7c228-ae7f-44c7-98df-78bc3949c528\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-wv4z7" Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.298416 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/5f4a2300-d512-490d-a876-6ad03f0c2f31-default-certificate\") pod \"router-default-5444994796-jzdds\" (UID: \"5f4a2300-d512-490d-a876-6ad03f0c2f31\") " pod="openshift-ingress/router-default-5444994796-jzdds" Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.300909 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5f4a2300-d512-490d-a876-6ad03f0c2f31-metrics-certs\") pod \"router-default-5444994796-jzdds\" (UID: \"5f4a2300-d512-490d-a876-6ad03f0c2f31\") " pod="openshift-ingress/router-default-5444994796-jzdds" Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.303284 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sd9pb\" (UniqueName: \"kubernetes.io/projected/5bd6837c-65d0-40c3-9cbc-d05d4f1e93d0-kube-api-access-sd9pb\") pod \"image-registry-697d97f7c8-d44bj\" (UID: \"5bd6837c-65d0-40c3-9cbc-d05d4f1e93d0\") " pod="openshift-image-registry/image-registry-697d97f7c8-d44bj" Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.315483 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2wqwl\" (UniqueName: \"kubernetes.io/projected/be571415-6ab0-4b8f-a971-3225465af110-kube-api-access-2wqwl\") pod \"migrator-59844c95c7-h4cr4\" (UID: \"be571415-6ab0-4b8f-a971-3225465af110\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-h4cr4" Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.338775 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/7307207b-fd57-4efc-807b-7e0664873f73-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-tx6fd\" (UID: \"7307207b-fd57-4efc-807b-7e0664873f73\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-tx6fd" Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.354392 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bd7w4\" (UniqueName: \"kubernetes.io/projected/f72f30c1-aeab-47d1-b353-5ea33af8eb6b-kube-api-access-bd7w4\") pod \"csi-hostpathplugin-mwnxd\" (UID: \"f72f30c1-aeab-47d1-b353-5ea33af8eb6b\") " pod="hostpath-provisioner/csi-hostpathplugin-mwnxd" Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.354430 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/f72f30c1-aeab-47d1-b353-5ea33af8eb6b-mountpoint-dir\") pod \"csi-hostpathplugin-mwnxd\" 
(UID: \"f72f30c1-aeab-47d1-b353-5ea33af8eb6b\") " pod="hostpath-provisioner/csi-hostpathplugin-mwnxd" Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.354456 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/0c6406d2-db7c-458f-9c56-a640eee1f327-signing-cabundle\") pod \"service-ca-9c57cc56f-crvsg\" (UID: \"0c6406d2-db7c-458f-9c56-a640eee1f327\") " pod="openshift-service-ca/service-ca-9c57cc56f-crvsg" Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.354504 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/f72f30c1-aeab-47d1-b353-5ea33af8eb6b-socket-dir\") pod \"csi-hostpathplugin-mwnxd\" (UID: \"f72f30c1-aeab-47d1-b353-5ea33af8eb6b\") " pod="hostpath-provisioner/csi-hostpathplugin-mwnxd" Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.354537 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/8aecac31-d20f-4725-b563-b66c9288769d-metrics-tls\") pod \"dns-default-rkzlx\" (UID: \"8aecac31-d20f-4725-b563-b66c9288769d\") " pod="openshift-dns/dns-default-rkzlx" Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.354584 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wrn4l\" (UniqueName: \"kubernetes.io/projected/7307207b-fd57-4efc-807b-7e0664873f73-kube-api-access-wrn4l\") pod \"multus-admission-controller-857f4d67dd-tx6fd\" (UID: \"7307207b-fd57-4efc-807b-7e0664873f73\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-tx6fd" Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.354653 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rww65\" (UniqueName: \"kubernetes.io/projected/082e51f2-7ac3-4111-a40d-eb8498db9153-kube-api-access-rww65\") pod \"packageserver-d55dfcdfc-kqzdm\" (UID: \"082e51f2-7ac3-4111-a40d-eb8498db9153\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-kqzdm" Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.354717 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/0c6406d2-db7c-458f-9c56-a640eee1f327-signing-key\") pod \"service-ca-9c57cc56f-crvsg\" (UID: \"0c6406d2-db7c-458f-9c56-a640eee1f327\") " pod="openshift-service-ca/service-ca-9c57cc56f-crvsg" Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.354742 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9gq6w\" (UniqueName: \"kubernetes.io/projected/0c6406d2-db7c-458f-9c56-a640eee1f327-kube-api-access-9gq6w\") pod \"service-ca-9c57cc56f-crvsg\" (UID: \"0c6406d2-db7c-458f-9c56-a640eee1f327\") " pod="openshift-service-ca/service-ca-9c57cc56f-crvsg" Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.354767 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/f72f30c1-aeab-47d1-b353-5ea33af8eb6b-mountpoint-dir\") pod \"csi-hostpathplugin-mwnxd\" (UID: \"f72f30c1-aeab-47d1-b353-5ea33af8eb6b\") " pod="hostpath-provisioner/csi-hostpathplugin-mwnxd" Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.355756 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-cabundle\" (UniqueName: 
\"kubernetes.io/configmap/0c6406d2-db7c-458f-9c56-a640eee1f327-signing-cabundle\") pod \"service-ca-9c57cc56f-crvsg\" (UID: \"0c6406d2-db7c-458f-9c56-a640eee1f327\") " pod="openshift-service-ca/service-ca-9c57cc56f-crvsg" Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.355853 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/7307207b-fd57-4efc-807b-7e0664873f73-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-tx6fd\" (UID: \"7307207b-fd57-4efc-807b-7e0664873f73\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-tx6fd" Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.345554 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-4xj5z" Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.356401 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/f72f30c1-aeab-47d1-b353-5ea33af8eb6b-socket-dir\") pod \"csi-hostpathplugin-mwnxd\" (UID: \"f72f30c1-aeab-47d1-b353-5ea33af8eb6b\") " pod="hostpath-provisioner/csi-hostpathplugin-mwnxd" Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.349881 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/8d0e8826-188e-474f-9d5b-848886f95d1d-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-j6mnr\" (UID: \"8d0e8826-188e-474f-9d5b-848886f95d1d\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-j6mnr" Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.354794 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/9ce2a24d-e7e3-4c93-a46a-e914e248d6ae-cert\") pod \"ingress-canary-znxn2\" (UID: \"9ce2a24d-e7e3-4c93-a46a-e914e248d6ae\") " pod="openshift-ingress-canary/ingress-canary-znxn2" Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.357368 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tczr8\" (UniqueName: \"kubernetes.io/projected/8aecac31-d20f-4725-b563-b66c9288769d-kube-api-access-tczr8\") pod \"dns-default-rkzlx\" (UID: \"8aecac31-d20f-4725-b563-b66c9288769d\") " pod="openshift-dns/dns-default-rkzlx" Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.357465 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-d44bj\" (UID: \"5bd6837c-65d0-40c3-9cbc-d05d4f1e93d0\") " pod="openshift-image-registry/image-registry-697d97f7c8-d44bj" Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.357642 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/082e51f2-7ac3-4111-a40d-eb8498db9153-tmpfs\") pod \"packageserver-d55dfcdfc-kqzdm\" (UID: \"082e51f2-7ac3-4111-a40d-eb8498db9153\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-kqzdm" Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.357699 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/f72f30c1-aeab-47d1-b353-5ea33af8eb6b-plugins-dir\") pod 
\"csi-hostpathplugin-mwnxd\" (UID: \"f72f30c1-aeab-47d1-b353-5ea33af8eb6b\") " pod="hostpath-provisioner/csi-hostpathplugin-mwnxd" Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.357768 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/8aecac31-d20f-4725-b563-b66c9288769d-config-volume\") pod \"dns-default-rkzlx\" (UID: \"8aecac31-d20f-4725-b563-b66c9288769d\") " pod="openshift-dns/dns-default-rkzlx" Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.357904 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/082e51f2-7ac3-4111-a40d-eb8498db9153-apiservice-cert\") pod \"packageserver-d55dfcdfc-kqzdm\" (UID: \"082e51f2-7ac3-4111-a40d-eb8498db9153\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-kqzdm" Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.357966 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x2rb9\" (UniqueName: \"kubernetes.io/projected/9ce2a24d-e7e3-4c93-a46a-e914e248d6ae-kube-api-access-x2rb9\") pod \"ingress-canary-znxn2\" (UID: \"9ce2a24d-e7e3-4c93-a46a-e914e248d6ae\") " pod="openshift-ingress-canary/ingress-canary-znxn2" Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.357997 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/f72f30c1-aeab-47d1-b353-5ea33af8eb6b-registration-dir\") pod \"csi-hostpathplugin-mwnxd\" (UID: \"f72f30c1-aeab-47d1-b353-5ea33af8eb6b\") " pod="hostpath-provisioner/csi-hostpathplugin-mwnxd" Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.358035 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/082e51f2-7ac3-4111-a40d-eb8498db9153-webhook-cert\") pod \"packageserver-d55dfcdfc-kqzdm\" (UID: \"082e51f2-7ac3-4111-a40d-eb8498db9153\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-kqzdm" Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.358078 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/f72f30c1-aeab-47d1-b353-5ea33af8eb6b-csi-data-dir\") pod \"csi-hostpathplugin-mwnxd\" (UID: \"f72f30c1-aeab-47d1-b353-5ea33af8eb6b\") " pod="hostpath-provisioner/csi-hostpathplugin-mwnxd" Jan 27 07:53:57 crc kubenswrapper[4787]: E0127 07:53:57.358125 4787 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-27 07:53:57.858095124 +0000 UTC m=+143.510450616 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-d44bj" (UID: "5bd6837c-65d0-40c3-9cbc-d05d4f1e93d0") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.358215 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/f72f30c1-aeab-47d1-b353-5ea33af8eb6b-registration-dir\") pod \"csi-hostpathplugin-mwnxd\" (UID: \"f72f30c1-aeab-47d1-b353-5ea33af8eb6b\") " pod="hostpath-provisioner/csi-hostpathplugin-mwnxd" Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.358304 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/f72f30c1-aeab-47d1-b353-5ea33af8eb6b-csi-data-dir\") pod \"csi-hostpathplugin-mwnxd\" (UID: \"f72f30c1-aeab-47d1-b353-5ea33af8eb6b\") " pod="hostpath-provisioner/csi-hostpathplugin-mwnxd" Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.358447 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/f72f30c1-aeab-47d1-b353-5ea33af8eb6b-plugins-dir\") pod \"csi-hostpathplugin-mwnxd\" (UID: \"f72f30c1-aeab-47d1-b353-5ea33af8eb6b\") " pod="hostpath-provisioner/csi-hostpathplugin-mwnxd" Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.358819 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/082e51f2-7ac3-4111-a40d-eb8498db9153-tmpfs\") pod \"packageserver-d55dfcdfc-kqzdm\" (UID: \"082e51f2-7ac3-4111-a40d-eb8498db9153\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-kqzdm" Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.363816 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-h4cr4" Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.364056 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/082e51f2-7ac3-4111-a40d-eb8498db9153-webhook-cert\") pod \"packageserver-d55dfcdfc-kqzdm\" (UID: \"082e51f2-7ac3-4111-a40d-eb8498db9153\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-kqzdm" Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.368578 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/082e51f2-7ac3-4111-a40d-eb8498db9153-apiservice-cert\") pod \"packageserver-d55dfcdfc-kqzdm\" (UID: \"082e51f2-7ac3-4111-a40d-eb8498db9153\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-kqzdm" Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.369002 4787 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-h5d4g" Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.382412 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/5bd6837c-65d0-40c3-9cbc-d05d4f1e93d0-bound-sa-token\") pod \"image-registry-697d97f7c8-d44bj\" (UID: \"5bd6837c-65d0-40c3-9cbc-d05d4f1e93d0\") " pod="openshift-image-registry/image-registry-697d97f7c8-d44bj" Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.384793 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fw5zw\" (UniqueName: \"kubernetes.io/projected/7727572b-c246-4b22-b3eb-14275d3d25ee-kube-api-access-fw5zw\") pod \"service-ca-operator-777779d784-xf9dv\" (UID: \"7727572b-c246-4b22-b3eb-14275d3d25ee\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-xf9dv" Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.385374 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/8aecac31-d20f-4725-b563-b66c9288769d-config-volume\") pod \"dns-default-rkzlx\" (UID: \"8aecac31-d20f-4725-b563-b66c9288769d\") " pod="openshift-dns/dns-default-rkzlx" Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.385799 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/9ce2a24d-e7e3-4c93-a46a-e914e248d6ae-cert\") pod \"ingress-canary-znxn2\" (UID: \"9ce2a24d-e7e3-4c93-a46a-e914e248d6ae\") " pod="openshift-ingress-canary/ingress-canary-znxn2" Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.385946 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/0c6406d2-db7c-458f-9c56-a640eee1f327-signing-key\") pod \"service-ca-9c57cc56f-crvsg\" (UID: \"0c6406d2-db7c-458f-9c56-a640eee1f327\") " pod="openshift-service-ca/service-ca-9c57cc56f-crvsg" Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.396801 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/770c6bc0-a6c4-42b9-b21e-d1c36ed7b8bf-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-8b5jc\" (UID: \"770c6bc0-a6c4-42b9-b21e-d1c36ed7b8bf\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-8b5jc" Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.434131 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-84k56"] Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.434629 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-xwx4w"] Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.437662 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/c1a7c228-ae7f-44c7-98df-78bc3949c528-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-wv4z7\" (UID: \"c1a7c228-ae7f-44c7-98df-78bc3949c528\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-wv4z7" Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.447210 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j7x5m\" (UniqueName: \"kubernetes.io/projected/66eeb178-2611-4361-962c-a399b1f243b0-kube-api-access-j7x5m\") pod 
\"catalog-operator-68c6474976-cgsgg\" (UID: \"66eeb178-2611-4361-962c-a399b1f243b0\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-cgsgg" Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.453387 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/8aecac31-d20f-4725-b563-b66c9288769d-metrics-tls\") pod \"dns-default-rkzlx\" (UID: \"8aecac31-d20f-4725-b563-b66c9288769d\") " pod="openshift-dns/dns-default-rkzlx" Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.455473 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vdfkh\" (UniqueName: \"kubernetes.io/projected/cd10b89a-565a-4ae6-b7dd-4bac183ece8c-kube-api-access-vdfkh\") pod \"machine-config-operator-74547568cd-m7czl\" (UID: \"cd10b89a-565a-4ae6-b7dd-4bac183ece8c\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-m7czl" Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.459273 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 27 07:53:57 crc kubenswrapper[4787]: E0127 07:53:57.459420 4787 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-27 07:53:57.959397784 +0000 UTC m=+143.611753276 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.460148 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-d44bj\" (UID: \"5bd6837c-65d0-40c3-9cbc-d05d4f1e93d0\") " pod="openshift-image-registry/image-registry-697d97f7c8-d44bj" Jan 27 07:53:57 crc kubenswrapper[4787]: E0127 07:53:57.462407 4787 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-27 07:53:57.962395975 +0000 UTC m=+143.614751467 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-d44bj" (UID: "5bd6837c-65d0-40c3-9cbc-d05d4f1e93d0") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.479644 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h8hpd\" (UniqueName: \"kubernetes.io/projected/5f4a2300-d512-490d-a876-6ad03f0c2f31-kube-api-access-h8hpd\") pod \"router-default-5444994796-jzdds\" (UID: \"5f4a2300-d512-490d-a876-6ad03f0c2f31\") " pod="openshift-ingress/router-default-5444994796-jzdds" Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.498678 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9sczk\" (UniqueName: \"kubernetes.io/projected/0260c856-7cbc-4fdd-887e-5b2403c05e04-kube-api-access-9sczk\") pod \"machine-config-server-9xlcr\" (UID: \"0260c856-7cbc-4fdd-887e-5b2403c05e04\") " pod="openshift-machine-config-operator/machine-config-server-9xlcr" Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.533520 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mjxfr\" (UniqueName: \"kubernetes.io/projected/ad35acfc-9b56-41e7-87bd-b43f5c006dd5-kube-api-access-mjxfr\") pod \"dns-operator-744455d44c-t6xzm\" (UID: \"ad35acfc-9b56-41e7-87bd-b43f5c006dd5\") " pod="openshift-dns-operator/dns-operator-744455d44c-t6xzm" Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.540422 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9r6l9\" (UniqueName: \"kubernetes.io/projected/152af819-6586-4345-b2ef-cb8ad845a6b1-kube-api-access-9r6l9\") pod \"collect-profiles-29491665-p955p\" (UID: \"152af819-6586-4345-b2ef-cb8ad845a6b1\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29491665-p955p" Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.542664 4787 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-t6xzm" Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.547874 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-wdppr"] Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.561044 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.561288 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rjp2w\" (UniqueName: \"kubernetes.io/projected/320d8560-54f7-4505-aa98-5beb6c45505e-kube-api-access-rjp2w\") pod \"olm-operator-6b444d44fb-vm6dc\" (UID: \"320d8560-54f7-4505-aa98-5beb6c45505e\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-vm6dc" Jan 27 07:53:57 crc kubenswrapper[4787]: E0127 07:53:57.561609 4787 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-27 07:53:58.061592537 +0000 UTC m=+143.713948029 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.561882 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-ctv89"] Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.571164 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-j6mnr" Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.577821 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-m7czl" Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.594253 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bd7w4\" (UniqueName: \"kubernetes.io/projected/f72f30c1-aeab-47d1-b353-5ea33af8eb6b-kube-api-access-bd7w4\") pod \"csi-hostpathplugin-mwnxd\" (UID: \"f72f30c1-aeab-47d1-b353-5ea33af8eb6b\") " pod="hostpath-provisioner/csi-hostpathplugin-mwnxd" Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.594649 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-wv4z7" Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.610881 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-vm6dc" Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.618070 4787 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-8b5jc" Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.619112 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rww65\" (UniqueName: \"kubernetes.io/projected/082e51f2-7ac3-4111-a40d-eb8498db9153-kube-api-access-rww65\") pod \"packageserver-d55dfcdfc-kqzdm\" (UID: \"082e51f2-7ac3-4111-a40d-eb8498db9153\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-kqzdm" Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.632836 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-cgsgg" Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.633172 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wrn4l\" (UniqueName: \"kubernetes.io/projected/7307207b-fd57-4efc-807b-7e0664873f73-kube-api-access-wrn4l\") pod \"multus-admission-controller-857f4d67dd-tx6fd\" (UID: \"7307207b-fd57-4efc-807b-7e0664873f73\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-tx6fd" Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.649811 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29491665-p955p" Jan 27 07:53:57 crc kubenswrapper[4787]: W0127 07:53:57.656215 4787 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod301e3f1a_19c5_47a7_b85d_81676098f971.slice/crio-6731ad2fbbabd13c861a61f57512d12b0f1187c3b7aa0f5df8f6ddfffbda1def WatchSource:0}: Error finding container 6731ad2fbbabd13c861a61f57512d12b0f1187c3b7aa0f5df8f6ddfffbda1def: Status 404 returned error can't find the container with id 6731ad2fbbabd13c861a61f57512d12b0f1187c3b7aa0f5df8f6ddfffbda1def Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.663323 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-d44bj\" (UID: \"5bd6837c-65d0-40c3-9cbc-d05d4f1e93d0\") " pod="openshift-image-registry/image-registry-697d97f7c8-d44bj" Jan 27 07:53:57 crc kubenswrapper[4787]: E0127 07:53:57.663945 4787 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-27 07:53:58.163920964 +0000 UTC m=+143.816276466 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-d44bj" (UID: "5bd6837c-65d0-40c3-9cbc-d05d4f1e93d0") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.678748 4787 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-xf9dv" Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.680027 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tczr8\" (UniqueName: \"kubernetes.io/projected/8aecac31-d20f-4725-b563-b66c9288769d-kube-api-access-tczr8\") pod \"dns-default-rkzlx\" (UID: \"8aecac31-d20f-4725-b563-b66c9288769d\") " pod="openshift-dns/dns-default-rkzlx" Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.696940 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9gq6w\" (UniqueName: \"kubernetes.io/projected/0c6406d2-db7c-458f-9c56-a640eee1f327-kube-api-access-9gq6w\") pod \"service-ca-9c57cc56f-crvsg\" (UID: \"0c6406d2-db7c-458f-9c56-a640eee1f327\") " pod="openshift-service-ca/service-ca-9c57cc56f-crvsg" Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.697133 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-9xlcr" Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.697953 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-crvsg" Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.709630 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress/router-default-5444994796-jzdds" Jan 27 07:53:57 crc kubenswrapper[4787]: W0127 07:53:57.711958 4787 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podaf87df13_9164_4e5e_99d4_5cbd9dfe80a5.slice/crio-1253909e5ce3f2d18f3f22b50a5ae3f7abf4818cee9931b19e52db7e0faacf6f WatchSource:0}: Error finding container 1253909e5ce3f2d18f3f22b50a5ae3f7abf4818cee9931b19e52db7e0faacf6f: Status 404 returned error can't find the container with id 1253909e5ce3f2d18f3f22b50a5ae3f7abf4818cee9931b19e52db7e0faacf6f Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.721963 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-tx6fd" Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.730604 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x2rb9\" (UniqueName: \"kubernetes.io/projected/9ce2a24d-e7e3-4c93-a46a-e914e248d6ae-kube-api-access-x2rb9\") pod \"ingress-canary-znxn2\" (UID: \"9ce2a24d-e7e3-4c93-a46a-e914e248d6ae\") " pod="openshift-ingress-canary/ingress-canary-znxn2" Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.739447 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-vl4tj"] Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.741474 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-kqzdm" Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.742072 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/dns-default-rkzlx" Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.748009 4787 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ingress-canary/ingress-canary-znxn2" Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.749707 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-zkx8b"] Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.765265 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 27 07:53:57 crc kubenswrapper[4787]: E0127 07:53:57.765436 4787 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-27 07:53:58.26540961 +0000 UTC m=+143.917765102 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.765775 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-d44bj\" (UID: \"5bd6837c-65d0-40c3-9cbc-d05d4f1e93d0\") " pod="openshift-image-registry/image-registry-697d97f7c8-d44bj" Jan 27 07:53:57 crc kubenswrapper[4787]: E0127 07:53:57.766185 4787 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-27 07:53:58.266171109 +0000 UTC m=+143.918526601 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-d44bj" (UID: "5bd6837c-65d0-40c3-9cbc-d05d4f1e93d0") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.784891 4787 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-mwnxd" Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.857843 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-ln7rp"] Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.868355 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 27 07:53:57 crc kubenswrapper[4787]: E0127 07:53:57.868500 4787 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-27 07:53:58.368467396 +0000 UTC m=+144.020822888 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.868986 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-d44bj\" (UID: \"5bd6837c-65d0-40c3-9cbc-d05d4f1e93d0\") " pod="openshift-image-registry/image-registry-697d97f7c8-d44bj" Jan 27 07:53:57 crc kubenswrapper[4787]: E0127 07:53:57.872534 4787 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-27 07:53:58.372505946 +0000 UTC m=+144.024861438 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-d44bj" (UID: "5bd6837c-65d0-40c3-9cbc-d05d4f1e93d0") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.891625 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-cqdk9"] Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.898670 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-g44f6"] Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.900931 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-q44w4"] Jan 27 07:53:57 crc kubenswrapper[4787]: W0127 07:53:57.929310 4787 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc7ed2e1c_c815_45e4_a5a1_e9006de2e8b3.slice/crio-4d9aee465b02072995de81211f3df545d4447c81d7c2d1218c594024fa8a620b WatchSource:0}: Error finding container 4d9aee465b02072995de81211f3df545d4447c81d7c2d1218c594024fa8a620b: Status 404 returned error can't find the container with id 4d9aee465b02072995de81211f3df545d4447c81d7c2d1218c594024fa8a620b Jan 27 07:53:57 crc kubenswrapper[4787]: W0127 07:53:57.930186 4787 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podca4c081d_896c_4cc8_9656_59364376de35.slice/crio-7128b0f800ba7ef260edfd0bbe8553abf59eec44713d7f401d1f64afeed28b63 WatchSource:0}: Error finding container 7128b0f800ba7ef260edfd0bbe8553abf59eec44713d7f401d1f64afeed28b63: Status 404 returned error can't find the container with id 7128b0f800ba7ef260edfd0bbe8553abf59eec44713d7f401d1f64afeed28b63 Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.945609 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-d9n62"] Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.947453 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-h5d4g"] Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.974345 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-zgw8m"] Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.974626 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 27 07:53:57 crc kubenswrapper[4787]: E0127 07:53:57.975172 4787 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-27 07:53:58.475149995 +0000 UTC m=+144.127505487 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 27 07:53:57 crc kubenswrapper[4787]: W0127 07:53:57.980184 4787 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0a3473c0_d321_4be8_860f_bd51210cb58f.slice/crio-55117423bdc9eccf4aba0a0c979538af0f4d46f9fbff80211848862b36fddf17 WatchSource:0}: Error finding container 55117423bdc9eccf4aba0a0c979538af0f4d46f9fbff80211848862b36fddf17: Status 404 returned error can't find the container with id 55117423bdc9eccf4aba0a0c979538af0f4d46f9fbff80211848862b36fddf17 Jan 27 07:53:57 crc kubenswrapper[4787]: I0127 07:53:57.995320 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-wdvfw"] Jan 27 07:53:58 crc kubenswrapper[4787]: I0127 07:53:58.019697 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-84k56" event={"ID":"daf6a704-5857-419a-bfaa-e2af3a05d386","Type":"ContainerStarted","Data":"efc811c1725740bff5cb730eaca2c2912ccabab6aa80552f77f629591a8efadb"} Jan 27 07:53:58 crc kubenswrapper[4787]: I0127 07:53:58.023900 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-mnzjr"] Jan 27 07:53:58 crc kubenswrapper[4787]: I0127 07:53:58.081220 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-d44bj\" (UID: \"5bd6837c-65d0-40c3-9cbc-d05d4f1e93d0\") " pod="openshift-image-registry/image-registry-697d97f7c8-d44bj" Jan 27 07:53:58 crc kubenswrapper[4787]: E0127 07:53:58.082630 4787 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-27 07:53:58.582555893 +0000 UTC m=+144.234911385 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-d44bj" (UID: "5bd6837c-65d0-40c3-9cbc-d05d4f1e93d0") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 27 07:53:58 crc kubenswrapper[4787]: I0127 07:53:58.086524 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-dlz8t" event={"ID":"0259775f-1fef-486a-bc17-4638e38ed83f","Type":"ContainerStarted","Data":"2b47bd63962bfe1d1f2626a0146931ca7d71c50c1e9a2b870fbb6f7f3fafcb6f"} Jan 27 07:53:58 crc kubenswrapper[4787]: I0127 07:53:58.086667 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-dlz8t" event={"ID":"0259775f-1fef-486a-bc17-4638e38ed83f","Type":"ContainerStarted","Data":"41afcd3d4f1c7a6dc4b744f46a303d0903dfaf2cc98bd0b97df786bbe6ad0ce1"} Jan 27 07:53:58 crc kubenswrapper[4787]: I0127 07:53:58.086752 4787 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-dlz8t" Jan 27 07:53:58 crc kubenswrapper[4787]: I0127 07:53:58.089959 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-h4cr4"] Jan 27 07:53:58 crc kubenswrapper[4787]: I0127 07:53:58.090237 4787 patch_prober.go:28] interesting pod/route-controller-manager-6576b87f9c-dlz8t container/route-controller-manager namespace/openshift-route-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.9:8443/healthz\": dial tcp 10.217.0.9:8443: connect: connection refused" start-of-body= Jan 27 07:53:58 crc kubenswrapper[4787]: I0127 07:53:58.090321 4787 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-dlz8t" podUID="0259775f-1fef-486a-bc17-4638e38ed83f" containerName="route-controller-manager" probeResult="failure" output="Get \"https://10.217.0.9:8443/healthz\": dial tcp 10.217.0.9:8443: connect: connection refused" Jan 27 07:53:58 crc kubenswrapper[4787]: W0127 07:53:58.093554 4787 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2e88d48e_6302_4a84_90a7_69446be90e4a.slice/crio-080593f490256d1b9111cdbbbfb3bf578e73f07bd416e1297c741853a022c9d0 WatchSource:0}: Error finding container 080593f490256d1b9111cdbbbfb3bf578e73f07bd416e1297c741853a022c9d0: Status 404 returned error can't find the container with id 080593f490256d1b9111cdbbbfb3bf578e73f07bd416e1297c741853a022c9d0 Jan 27 07:53:58 crc kubenswrapper[4787]: I0127 07:53:58.098398 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-qptnb" event={"ID":"b070600e-8a6f-4bb9-a1c2-e763f55d90eb","Type":"ContainerStarted","Data":"29d34fb56a5ea9d4aa885d1db94f1f656d15f31f013518446e7a295dc5daddbd"} Jan 27 07:53:58 crc kubenswrapper[4787]: I0127 07:53:58.101034 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-xwx4w" event={"ID":"301e3f1a-19c5-47a7-b85d-81676098f971","Type":"ContainerStarted","Data":"6731ad2fbbabd13c861a61f57512d12b0f1187c3b7aa0f5df8f6ddfffbda1def"} Jan 27 07:53:58 
crc kubenswrapper[4787]: I0127 07:53:58.102999 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-hn2j2" event={"ID":"a1672707-9776-4274-9bdd-4f1dabf83038","Type":"ContainerStarted","Data":"a241f6eac7b737e993b87660c239179a420906d8f785dac6b9fd6e0bdd6c3bee"} Jan 27 07:53:58 crc kubenswrapper[4787]: I0127 07:53:58.103030 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-hn2j2" event={"ID":"a1672707-9776-4274-9bdd-4f1dabf83038","Type":"ContainerStarted","Data":"3eee0f59d1eca1b9d315c94eeaac740410d9f704d52695edb13bb727c96e9822"} Jan 27 07:53:58 crc kubenswrapper[4787]: I0127 07:53:58.107821 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-ctv89" event={"ID":"af87df13-9164-4e5e-99d4-5cbd9dfe80a5","Type":"ContainerStarted","Data":"1253909e5ce3f2d18f3f22b50a5ae3f7abf4818cee9931b19e52db7e0faacf6f"} Jan 27 07:53:58 crc kubenswrapper[4787]: I0127 07:53:58.111589 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-zkx8b" event={"ID":"ba0745e4-7610-44e2-9a65-cd3875393d64","Type":"ContainerStarted","Data":"0ce20a753e35b3f0cbb05a1acb9083e2b9b1eb4f0f0e0922aab81e6d4e6a231b"} Jan 27 07:53:58 crc kubenswrapper[4787]: I0127 07:53:58.112372 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-g44f6" event={"ID":"ca4c081d-896c-4cc8-9656-59364376de35","Type":"ContainerStarted","Data":"7128b0f800ba7ef260edfd0bbe8553abf59eec44713d7f401d1f64afeed28b63"} Jan 27 07:53:58 crc kubenswrapper[4787]: I0127 07:53:58.130701 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-4xj5z"] Jan 27 07:53:58 crc kubenswrapper[4787]: I0127 07:53:58.174780 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-mnrsb" event={"ID":"832859dc-03b3-4b2b-9a40-75372ebb38d9","Type":"ContainerStarted","Data":"174043ae4eb3f27b2a02f8f0d458a2ba711820a4ca7d210e32ef6ae43cdd1bc9"} Jan 27 07:53:58 crc kubenswrapper[4787]: I0127 07:53:58.174851 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-mnrsb" event={"ID":"832859dc-03b3-4b2b-9a40-75372ebb38d9","Type":"ContainerStarted","Data":"7f42185c5aef7dce4e2e399f8d91331294ff845005ceee90fd08fcb40774b6f0"} Jan 27 07:53:58 crc kubenswrapper[4787]: I0127 07:53:58.185850 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 27 07:53:58 crc kubenswrapper[4787]: E0127 07:53:58.188027 4787 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-27 07:53:58.688002046 +0000 UTC m=+144.340357538 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 27 07:53:58 crc kubenswrapper[4787]: I0127 07:53:58.191618 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-tfh8x" event={"ID":"287286a8-80b2-4c95-948d-a096153d8e51","Type":"ContainerStarted","Data":"8c17a3ca80b645ddcd4da67a398d34fa5868490459331646e4490ab4755aca07"} Jan 27 07:53:58 crc kubenswrapper[4787]: I0127 07:53:58.194416 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-wdppr" event={"ID":"fcc64739-2fd2-4413-a19e-0bc14dd883d6","Type":"ContainerStarted","Data":"0586f60c3abf6a05bb8ec725bce60998408f815b2354ed56f05b7be6dbc58bdc"} Jan 27 07:53:58 crc kubenswrapper[4787]: I0127 07:53:58.195887 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-vl4tj" event={"ID":"c7ed2e1c-c815-45e4-a5a1-e9006de2e8b3","Type":"ContainerStarted","Data":"4d9aee465b02072995de81211f3df545d4447c81d7c2d1218c594024fa8a620b"} Jan 27 07:53:58 crc kubenswrapper[4787]: I0127 07:53:58.199979 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-qtkc8" event={"ID":"3ef9e285-88a8-499d-8fb8-e4c882336e68","Type":"ContainerStarted","Data":"211ad6fc0e133b1fdefabe36246157cc21a82111703439f2ce1c112f11c8892e"} Jan 27 07:53:58 crc kubenswrapper[4787]: I0127 07:53:58.254619 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-t6xzm"] Jan 27 07:53:58 crc kubenswrapper[4787]: I0127 07:53:58.287874 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-d44bj\" (UID: \"5bd6837c-65d0-40c3-9cbc-d05d4f1e93d0\") " pod="openshift-image-registry/image-registry-697d97f7c8-d44bj" Jan 27 07:53:58 crc kubenswrapper[4787]: E0127 07:53:58.289839 4787 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-27 07:53:58.789821335 +0000 UTC m=+144.442176827 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-d44bj" (UID: "5bd6837c-65d0-40c3-9cbc-d05d4f1e93d0") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 27 07:53:58 crc kubenswrapper[4787]: W0127 07:53:58.345801 4787 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5f4a2300_d512_490d_a876_6ad03f0c2f31.slice/crio-f447b97f79c24f1fa86dc633d9ade1d3765dd9b8fca0314cad47652ceecd0c98 WatchSource:0}: Error finding container f447b97f79c24f1fa86dc633d9ade1d3765dd9b8fca0314cad47652ceecd0c98: Status 404 returned error can't find the container with id f447b97f79c24f1fa86dc633d9ade1d3765dd9b8fca0314cad47652ceecd0c98 Jan 27 07:53:58 crc kubenswrapper[4787]: I0127 07:53:58.394938 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 27 07:53:58 crc kubenswrapper[4787]: E0127 07:53:58.396736 4787 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-27 07:53:58.896712383 +0000 UTC m=+144.549067875 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 27 07:53:58 crc kubenswrapper[4787]: I0127 07:53:58.503809 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-d44bj\" (UID: \"5bd6837c-65d0-40c3-9cbc-d05d4f1e93d0\") " pod="openshift-image-registry/image-registry-697d97f7c8-d44bj" Jan 27 07:53:58 crc kubenswrapper[4787]: E0127 07:53:58.505648 4787 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-27 07:53:59.005634736 +0000 UTC m=+144.657990228 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-d44bj" (UID: "5bd6837c-65d0-40c3-9cbc-d05d4f1e93d0") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 27 07:53:58 crc kubenswrapper[4787]: I0127 07:53:58.532052 4787 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-p29lm" podStartSLOduration=119.532027528 podStartE2EDuration="1m59.532027528s" podCreationTimestamp="2026-01-27 07:51:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-27 07:53:58.530706939 +0000 UTC m=+144.183062431" watchObservedRunningTime="2026-01-27 07:53:58.532027528 +0000 UTC m=+144.184383030" Jan 27 07:53:58 crc kubenswrapper[4787]: I0127 07:53:58.605140 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 27 07:53:58 crc kubenswrapper[4787]: E0127 07:53:58.614677 4787 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-27 07:53:59.114635802 +0000 UTC m=+144.766991484 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 27 07:53:58 crc kubenswrapper[4787]: I0127 07:53:58.679927 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-8b5jc"] Jan 27 07:53:58 crc kubenswrapper[4787]: I0127 07:53:58.685209 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-crvsg"] Jan 27 07:53:58 crc kubenswrapper[4787]: I0127 07:53:58.711980 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-d44bj\" (UID: \"5bd6837c-65d0-40c3-9cbc-d05d4f1e93d0\") " pod="openshift-image-registry/image-registry-697d97f7c8-d44bj" Jan 27 07:53:58 crc kubenswrapper[4787]: E0127 07:53:58.712493 4787 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-27 07:53:59.212469703 +0000 UTC m=+144.864825385 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-d44bj" (UID: "5bd6837c-65d0-40c3-9cbc-d05d4f1e93d0") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 27 07:53:58 crc kubenswrapper[4787]: I0127 07:53:58.818403 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 27 07:53:58 crc kubenswrapper[4787]: E0127 07:53:58.820411 4787 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-27 07:53:59.320371358 +0000 UTC m=+144.972726850 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 27 07:53:58 crc kubenswrapper[4787]: I0127 07:53:58.820974 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-d44bj\" (UID: \"5bd6837c-65d0-40c3-9cbc-d05d4f1e93d0\") " pod="openshift-image-registry/image-registry-697d97f7c8-d44bj" Jan 27 07:53:58 crc kubenswrapper[4787]: E0127 07:53:58.822308 4787 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-27 07:53:59.322279139 +0000 UTC m=+144.974634631 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-d44bj" (UID: "5bd6837c-65d0-40c3-9cbc-d05d4f1e93d0") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 27 07:53:58 crc kubenswrapper[4787]: I0127 07:53:58.917308 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-xf9dv"] Jan 27 07:53:58 crc kubenswrapper[4787]: I0127 07:53:58.923015 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 27 07:53:58 crc kubenswrapper[4787]: E0127 07:53:58.923475 4787 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-27 07:53:59.423446804 +0000 UTC m=+145.075802296 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 27 07:53:58 crc kubenswrapper[4787]: I0127 07:53:58.930442 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29491665-p955p"] Jan 27 07:53:58 crc kubenswrapper[4787]: I0127 07:53:58.934416 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-j6mnr"] Jan 27 07:53:58 crc kubenswrapper[4787]: W0127 07:53:58.944966 4787 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod770c6bc0_a6c4_42b9_b21e_d1c36ed7b8bf.slice/crio-d296ddc6bf8d7b5468444ef4917e9aa0c22406552d50e8139100bebefd2fb14f WatchSource:0}: Error finding container d296ddc6bf8d7b5468444ef4917e9aa0c22406552d50e8139100bebefd2fb14f: Status 404 returned error can't find the container with id d296ddc6bf8d7b5468444ef4917e9aa0c22406552d50e8139100bebefd2fb14f Jan 27 07:53:58 crc kubenswrapper[4787]: W0127 07:53:58.946110 4787 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0c6406d2_db7c_458f_9c56_a640eee1f327.slice/crio-0dc71c20f37b8087257a9fe017b8da119943fdfe4921d6702b4b2d71f52e1025 WatchSource:0}: Error finding container 0dc71c20f37b8087257a9fe017b8da119943fdfe4921d6702b4b2d71f52e1025: Status 404 returned error can't find the container with id 0dc71c20f37b8087257a9fe017b8da119943fdfe4921d6702b4b2d71f52e1025 Jan 27 07:53:59 crc kubenswrapper[4787]: I0127 07:53:59.024416 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-d44bj\" (UID: \"5bd6837c-65d0-40c3-9cbc-d05d4f1e93d0\") " pod="openshift-image-registry/image-registry-697d97f7c8-d44bj" Jan 27 07:53:59 crc kubenswrapper[4787]: E0127 07:53:59.024879 4787 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-27 07:53:59.524860458 +0000 UTC m=+145.177215950 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-d44bj" (UID: "5bd6837c-65d0-40c3-9cbc-d05d4f1e93d0") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 27 07:53:59 crc kubenswrapper[4787]: I0127 07:53:59.131905 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 27 07:53:59 crc kubenswrapper[4787]: E0127 07:53:59.132668 4787 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-27 07:53:59.632641169 +0000 UTC m=+145.284996661 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 27 07:53:59 crc kubenswrapper[4787]: I0127 07:53:59.157971 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-znxn2"] Jan 27 07:53:59 crc kubenswrapper[4787]: I0127 07:53:59.205305 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29491665-p955p" event={"ID":"152af819-6586-4345-b2ef-cb8ad845a6b1","Type":"ContainerStarted","Data":"dd1d962cbba22622cc8f00742deb5445d3f8c1e1a6d321d913c3f39997ffb3b5"} Jan 27 07:53:59 crc kubenswrapper[4787]: I0127 07:53:59.212652 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-4xj5z" event={"ID":"4c622447-70d1-4b27-b09c-6fa6402f632c","Type":"ContainerStarted","Data":"1b5bf904f5f1e8f79e8efc5a22b9f48d32b5fc1567f843d15b652083cc0e3ea5"} Jan 27 07:53:59 crc kubenswrapper[4787]: I0127 07:53:59.214400 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-h5d4g" event={"ID":"2e88d48e-6302-4a84-90a7-69446be90e4a","Type":"ContainerStarted","Data":"080593f490256d1b9111cdbbbfb3bf578e73f07bd416e1297c741853a022c9d0"} Jan 27 07:53:59 crc kubenswrapper[4787]: I0127 07:53:59.215708 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-crvsg" event={"ID":"0c6406d2-db7c-458f-9c56-a640eee1f327","Type":"ContainerStarted","Data":"0dc71c20f37b8087257a9fe017b8da119943fdfe4921d6702b4b2d71f52e1025"} Jan 27 07:53:59 crc kubenswrapper[4787]: I0127 07:53:59.217668 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-d9n62" event={"ID":"52652793-dcbd-4568-a32e-b727818c61a8","Type":"ContainerStarted","Data":"4adb5d9bef3b963fd885398b5383883b4aaac905cc20d93dcd9e98bb57cfe2cc"} Jan 27 07:53:59 crc kubenswrapper[4787]: I0127 07:53:59.221556 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-qptnb" event={"ID":"b070600e-8a6f-4bb9-a1c2-e763f55d90eb","Type":"ContainerStarted","Data":"310ccc96bfc18eaae7b0abfc190b26a49989ec96f4c419c7eab3252a1004a577"} Jan 27 07:53:59 crc kubenswrapper[4787]: I0127 07:53:59.224035 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-cqdk9" event={"ID":"2572ea5c-d57f-4bbc-b7a8-a72fcd4cced5","Type":"ContainerStarted","Data":"76cab52707c7779aa6ca2f62da481161aabf68d198a8565ceee652ad4d69cfda"} Jan 27 07:53:59 crc kubenswrapper[4787]: I0127 07:53:59.227275 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-ln7rp" event={"ID":"0a3473c0-d321-4be8-860f-bd51210cb58f","Type":"ContainerStarted","Data":"55117423bdc9eccf4aba0a0c979538af0f4d46f9fbff80211848862b36fddf17"} Jan 27 07:53:59 crc kubenswrapper[4787]: I0127 07:53:59.235255 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" 
(UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-d44bj\" (UID: \"5bd6837c-65d0-40c3-9cbc-d05d4f1e93d0\") " pod="openshift-image-registry/image-registry-697d97f7c8-d44bj" Jan 27 07:53:59 crc kubenswrapper[4787]: E0127 07:53:59.237416 4787 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-27 07:53:59.737402747 +0000 UTC m=+145.389758239 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-d44bj" (UID: "5bd6837c-65d0-40c3-9cbc-d05d4f1e93d0") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 27 07:53:59 crc kubenswrapper[4787]: I0127 07:53:59.240068 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-8b5jc" event={"ID":"770c6bc0-a6c4-42b9-b21e-d1c36ed7b8bf","Type":"ContainerStarted","Data":"d296ddc6bf8d7b5468444ef4917e9aa0c22406552d50e8139100bebefd2fb14f"} Jan 27 07:53:59 crc kubenswrapper[4787]: I0127 07:53:59.247376 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-cgsgg"] Jan 27 07:53:59 crc kubenswrapper[4787]: I0127 07:53:59.307502 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-wv4z7"] Jan 27 07:53:59 crc kubenswrapper[4787]: I0127 07:53:59.311144 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-m7czl"] Jan 27 07:53:59 crc kubenswrapper[4787]: I0127 07:53:59.317951 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-q44w4" event={"ID":"7c9507a4-925b-418e-b824-f338cd69a66e","Type":"ContainerStarted","Data":"06fffec946eb75065833bf8dcef868c04446ce7888e95fb235bcfbf091cabc99"} Jan 27 07:53:59 crc kubenswrapper[4787]: I0127 07:53:59.322864 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-vm6dc"] Jan 27 07:53:59 crc kubenswrapper[4787]: I0127 07:53:59.331870 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-9xlcr" event={"ID":"0260c856-7cbc-4fdd-887e-5b2403c05e04","Type":"ContainerStarted","Data":"6e504fe6448ffaf4082c3f9e3c16238131e32ca5463065c645aaaac7c9bda8a8"} Jan 27 07:53:59 crc kubenswrapper[4787]: I0127 07:53:59.336985 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 27 07:53:59 crc kubenswrapper[4787]: E0127 07:53:59.337707 4787 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 
podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-27 07:53:59.837686858 +0000 UTC m=+145.490042350 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 27 07:53:59 crc kubenswrapper[4787]: I0127 07:53:59.344332 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-zgw8m" event={"ID":"3b9018b2-8e63-4c80-8c71-cc9fa7ddb853","Type":"ContainerStarted","Data":"e5bf6924b44ba6b2dd6b844e67c0f5dd6bfcc887f40629cb15547e94d18588c8"} Jan 27 07:53:59 crc kubenswrapper[4787]: I0127 07:53:59.370782 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-wdppr" event={"ID":"fcc64739-2fd2-4413-a19e-0bc14dd883d6","Type":"ContainerStarted","Data":"2d766c390986fb12647d9296b2ff8a7a88b1618b438430370daaded86b506218"} Jan 27 07:53:59 crc kubenswrapper[4787]: I0127 07:53:59.371702 4787 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-558db77b4-wdppr" Jan 27 07:53:59 crc kubenswrapper[4787]: I0127 07:53:59.394033 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-rkzlx"] Jan 27 07:53:59 crc kubenswrapper[4787]: I0127 07:53:59.394460 4787 patch_prober.go:28] interesting pod/oauth-openshift-558db77b4-wdppr container/oauth-openshift namespace/openshift-authentication: Readiness probe status=failure output="Get \"https://10.217.0.18:6443/healthz\": dial tcp 10.217.0.18:6443: connect: connection refused" start-of-body= Jan 27 07:53:59 crc kubenswrapper[4787]: I0127 07:53:59.394545 4787 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-authentication/oauth-openshift-558db77b4-wdppr" podUID="fcc64739-2fd2-4413-a19e-0bc14dd883d6" containerName="oauth-openshift" probeResult="failure" output="Get \"https://10.217.0.18:6443/healthz\": dial tcp 10.217.0.18:6443: connect: connection refused" Jan 27 07:53:59 crc kubenswrapper[4787]: I0127 07:53:59.422383 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-j6mnr" event={"ID":"8d0e8826-188e-474f-9d5b-848886f95d1d","Type":"ContainerStarted","Data":"4a4dbca16def5f4b6df5963753724a39ea3f46877dae1dfff809374832b9f8ee"} Jan 27 07:53:59 crc kubenswrapper[4787]: I0127 07:53:59.462766 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-d44bj\" (UID: \"5bd6837c-65d0-40c3-9cbc-d05d4f1e93d0\") " pod="openshift-image-registry/image-registry-697d97f7c8-d44bj" Jan 27 07:53:59 crc kubenswrapper[4787]: E0127 07:53:59.463390 4787 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. 
No retries permitted until 2026-01-27 07:53:59.963375486 +0000 UTC m=+145.615730978 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-d44bj" (UID: "5bd6837c-65d0-40c3-9cbc-d05d4f1e93d0") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 27 07:53:59 crc kubenswrapper[4787]: I0127 07:53:59.466937 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-xwx4w" event={"ID":"301e3f1a-19c5-47a7-b85d-81676098f971","Type":"ContainerStarted","Data":"a84ca8c27c52e9d15e05d7ca98315d7c9e0320b3daccf98569865c02ff90e75e"} Jan 27 07:53:59 crc kubenswrapper[4787]: I0127 07:53:59.472692 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-tx6fd"] Jan 27 07:53:59 crc kubenswrapper[4787]: I0127 07:53:59.475334 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-h4cr4" event={"ID":"be571415-6ab0-4b8f-a971-3225465af110","Type":"ContainerStarted","Data":"e84cb2bc08f882da1b1db7f6845964928adbf9c01abbc37e3a8c94350e9ddbde"} Jan 27 07:53:59 crc kubenswrapper[4787]: I0127 07:53:59.479592 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-kqzdm"] Jan 27 07:53:59 crc kubenswrapper[4787]: W0127 07:53:59.482064 4787 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7307207b_fd57_4efc_807b_7e0664873f73.slice/crio-f0e097bbddf99a5cd749191fafe89c657d9a183c594c798eb00ebbd8cd926a52 WatchSource:0}: Error finding container f0e097bbddf99a5cd749191fafe89c657d9a183c594c798eb00ebbd8cd926a52: Status 404 returned error can't find the container with id f0e097bbddf99a5cd749191fafe89c657d9a183c594c798eb00ebbd8cd926a52 Jan 27 07:53:59 crc kubenswrapper[4787]: I0127 07:53:59.487720 4787 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-mnrsb" podStartSLOduration=121.487699401 podStartE2EDuration="2m1.487699401s" podCreationTimestamp="2026-01-27 07:51:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-27 07:53:59.413600934 +0000 UTC m=+145.065956436" watchObservedRunningTime="2026-01-27 07:53:59.487699401 +0000 UTC m=+145.140054893" Jan 27 07:53:59 crc kubenswrapper[4787]: I0127 07:53:59.491458 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-mwnxd"] Jan 27 07:53:59 crc kubenswrapper[4787]: I0127 07:53:59.495607 4787 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-f9d7485db-qptnb" podStartSLOduration=120.495579454 podStartE2EDuration="2m0.495579454s" podCreationTimestamp="2026-01-27 07:51:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-27 07:53:59.462241203 +0000 UTC m=+145.114596715" watchObservedRunningTime="2026-01-27 07:53:59.495579454 +0000 UTC m=+145.147934956" Jan 27 07:53:59 crc kubenswrapper[4787]: W0127 
07:53:59.507724 4787 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod8aecac31_d20f_4725_b563_b66c9288769d.slice/crio-fcee71dd80f35d712ce58fb88f374f8864ba0291f70a83880cf45673e0387f68 WatchSource:0}: Error finding container fcee71dd80f35d712ce58fb88f374f8864ba0291f70a83880cf45673e0387f68: Status 404 returned error can't find the container with id fcee71dd80f35d712ce58fb88f374f8864ba0291f70a83880cf45673e0387f68 Jan 27 07:53:59 crc kubenswrapper[4787]: I0127 07:53:59.508386 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-wdvfw" event={"ID":"c71976d9-f7ec-4258-8bf5-08a526607fd9","Type":"ContainerStarted","Data":"10240c10423ac37b2e46f6cea0da36c5c4a81e6b1447823d6ef1b050af4ab495"} Jan 27 07:53:59 crc kubenswrapper[4787]: I0127 07:53:59.508426 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-wdvfw" event={"ID":"c71976d9-f7ec-4258-8bf5-08a526607fd9","Type":"ContainerStarted","Data":"51626a0e171c125a2efc326d5536f02b3cb13c75048e102d07ec3d28dd1d93be"} Jan 27 07:53:59 crc kubenswrapper[4787]: W0127 07:53:59.526443 4787 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod082e51f2_7ac3_4111_a40d_eb8498db9153.slice/crio-e34965f2d6255a1e1c7d97589f31370ca5d0fbe322d30e475ba8e52245af1f1f WatchSource:0}: Error finding container e34965f2d6255a1e1c7d97589f31370ca5d0fbe322d30e475ba8e52245af1f1f: Status 404 returned error can't find the container with id e34965f2d6255a1e1c7d97589f31370ca5d0fbe322d30e475ba8e52245af1f1f Jan 27 07:53:59 crc kubenswrapper[4787]: I0127 07:53:59.526681 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-qtkc8" event={"ID":"3ef9e285-88a8-499d-8fb8-e4c882336e68","Type":"ContainerStarted","Data":"66a65b0119f8122a1c534ff54390fff9ee7f15a69cfd1201d2b9050f003e051d"} Jan 27 07:53:59 crc kubenswrapper[4787]: W0127 07:53:59.537473 4787 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf72f30c1_aeab_47d1_b353_5ea33af8eb6b.slice/crio-c39bdf5bbcb945f4a4464c0c5e574578ec8fec6904d0e3147e0c0840df9c02b1 WatchSource:0}: Error finding container c39bdf5bbcb945f4a4464c0c5e574578ec8fec6904d0e3147e0c0840df9c02b1: Status 404 returned error can't find the container with id c39bdf5bbcb945f4a4464c0c5e574578ec8fec6904d0e3147e0c0840df9c02b1 Jan 27 07:53:59 crc kubenswrapper[4787]: I0127 07:53:59.541402 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-jzdds" event={"ID":"5f4a2300-d512-490d-a876-6ad03f0c2f31","Type":"ContainerStarted","Data":"f447b97f79c24f1fa86dc633d9ade1d3765dd9b8fca0314cad47652ceecd0c98"} Jan 27 07:53:59 crc kubenswrapper[4787]: I0127 07:53:59.545037 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-xf9dv" event={"ID":"7727572b-c246-4b22-b3eb-14275d3d25ee","Type":"ContainerStarted","Data":"ec8b003891116f6dd3ec5d857235c290937ddaa356d64a376a5c322f76b6e81f"} Jan 27 07:53:59 crc kubenswrapper[4787]: I0127 07:53:59.549847 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-hn2j2" 
event={"ID":"a1672707-9776-4274-9bdd-4f1dabf83038","Type":"ContainerDied","Data":"a241f6eac7b737e993b87660c239179a420906d8f785dac6b9fd6e0bdd6c3bee"} Jan 27 07:53:59 crc kubenswrapper[4787]: I0127 07:53:59.549805 4787 generic.go:334] "Generic (PLEG): container finished" podID="a1672707-9776-4274-9bdd-4f1dabf83038" containerID="a241f6eac7b737e993b87660c239179a420906d8f785dac6b9fd6e0bdd6c3bee" exitCode=0 Jan 27 07:53:59 crc kubenswrapper[4787]: I0127 07:53:59.550161 4787 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-config-operator/openshift-config-operator-7777fb866f-hn2j2" Jan 27 07:53:59 crc kubenswrapper[4787]: I0127 07:53:59.554531 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-t6xzm" event={"ID":"ad35acfc-9b56-41e7-87bd-b43f5c006dd5","Type":"ContainerStarted","Data":"bc310fbc088a63b3a67f1c14ce3f1ec9954daeae6116c3ccad21af3b97a8d451"} Jan 27 07:53:59 crc kubenswrapper[4787]: I0127 07:53:59.558293 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-mnzjr" event={"ID":"f063d15e-fe05-46d3-8304-72da67594ea6","Type":"ContainerStarted","Data":"5366bb02b1eb923307814fe987825369e24038367d69657c58f848bd86f88189"} Jan 27 07:53:59 crc kubenswrapper[4787]: I0127 07:53:59.564815 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-84k56" event={"ID":"daf6a704-5857-419a-bfaa-e2af3a05d386","Type":"ContainerStarted","Data":"e4729d098a219b930fcb6407bf849e979362b747266527d8c556883588f57a0a"} Jan 27 07:53:59 crc kubenswrapper[4787]: I0127 07:53:59.567478 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 27 07:53:59 crc kubenswrapper[4787]: E0127 07:53:59.569026 4787 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-27 07:54:00.069003017 +0000 UTC m=+145.721358509 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 27 07:53:59 crc kubenswrapper[4787]: I0127 07:53:59.569100 4787 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-dlz8t" podStartSLOduration=120.56908432 podStartE2EDuration="2m0.56908432s" podCreationTimestamp="2026-01-27 07:51:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-27 07:53:59.534219772 +0000 UTC m=+145.186575284" watchObservedRunningTime="2026-01-27 07:53:59.56908432 +0000 UTC m=+145.221439812" Jan 27 07:53:59 crc kubenswrapper[4787]: I0127 07:53:59.583937 4787 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-dlz8t" Jan 27 07:53:59 crc kubenswrapper[4787]: I0127 07:53:59.618535 4787 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/downloads-7954f5f757-xwx4w" podStartSLOduration=120.618513249 podStartE2EDuration="2m0.618513249s" podCreationTimestamp="2026-01-27 07:51:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-27 07:53:59.615705974 +0000 UTC m=+145.268061476" watchObservedRunningTime="2026-01-27 07:53:59.618513249 +0000 UTC m=+145.270868751" Jan 27 07:53:59 crc kubenswrapper[4787]: I0127 07:53:59.628251 4787 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-wdvfw" podStartSLOduration=120.62822947 podStartE2EDuration="2m0.62822947s" podCreationTimestamp="2026-01-27 07:51:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-27 07:53:59.570524533 +0000 UTC m=+145.222880025" watchObservedRunningTime="2026-01-27 07:53:59.62822947 +0000 UTC m=+145.280584962" Jan 27 07:53:59 crc kubenswrapper[4787]: I0127 07:53:59.660573 4787 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/machine-api-operator-5694c8668f-qtkc8" podStartSLOduration=120.660527642 podStartE2EDuration="2m0.660527642s" podCreationTimestamp="2026-01-27 07:51:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-27 07:53:59.660389667 +0000 UTC m=+145.312745159" watchObservedRunningTime="2026-01-27 07:53:59.660527642 +0000 UTC m=+145.312883134" Jan 27 07:53:59 crc kubenswrapper[4787]: I0127 07:53:59.669862 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-d44bj\" (UID: \"5bd6837c-65d0-40c3-9cbc-d05d4f1e93d0\") " pod="openshift-image-registry/image-registry-697d97f7c8-d44bj" Jan 27 07:53:59 crc kubenswrapper[4787]: E0127 
07:53:59.672525 4787 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-27 07:54:00.172507067 +0000 UTC m=+145.824862559 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-d44bj" (UID: "5bd6837c-65d0-40c3-9cbc-d05d4f1e93d0") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 27 07:53:59 crc kubenswrapper[4787]: I0127 07:53:59.696754 4787 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-558db77b4-wdppr" podStartSLOduration=121.69673228 podStartE2EDuration="2m1.69673228s" podCreationTimestamp="2026-01-27 07:51:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-27 07:53:59.69488349 +0000 UTC m=+145.347238982" watchObservedRunningTime="2026-01-27 07:53:59.69673228 +0000 UTC m=+145.349087772" Jan 27 07:53:59 crc kubenswrapper[4787]: I0127 07:53:59.771483 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 27 07:53:59 crc kubenswrapper[4787]: E0127 07:53:59.773038 4787 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-27 07:54:00.273012088 +0000 UTC m=+145.925367580 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 27 07:53:59 crc kubenswrapper[4787]: I0127 07:53:59.776692 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-d44bj\" (UID: \"5bd6837c-65d0-40c3-9cbc-d05d4f1e93d0\") " pod="openshift-image-registry/image-registry-697d97f7c8-d44bj" Jan 27 07:53:59 crc kubenswrapper[4787]: E0127 07:53:59.777238 4787 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-27 07:54:00.277220534 +0000 UTC m=+145.929576026 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-d44bj" (UID: "5bd6837c-65d0-40c3-9cbc-d05d4f1e93d0") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 27 07:53:59 crc kubenswrapper[4787]: I0127 07:53:59.778932 4787 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication-operator/authentication-operator-69f744f599-84k56" podStartSLOduration=121.778915637 podStartE2EDuration="2m1.778915637s" podCreationTimestamp="2026-01-27 07:51:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-27 07:53:59.732006392 +0000 UTC m=+145.384361884" watchObservedRunningTime="2026-01-27 07:53:59.778915637 +0000 UTC m=+145.431271129" Jan 27 07:53:59 crc kubenswrapper[4787]: I0127 07:53:59.813306 4787 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-config-operator/openshift-config-operator-7777fb866f-hn2j2" podStartSLOduration=120.813279946 podStartE2EDuration="2m0.813279946s" podCreationTimestamp="2026-01-27 07:51:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-27 07:53:59.784989694 +0000 UTC m=+145.437345206" watchObservedRunningTime="2026-01-27 07:53:59.813279946 +0000 UTC m=+145.465635448" Jan 27 07:53:59 crc kubenswrapper[4787]: I0127 07:53:59.877764 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 27 07:53:59 crc kubenswrapper[4787]: E0127 07:53:59.878135 4787 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-27 07:54:00.378115879 +0000 UTC m=+146.030471371 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 27 07:53:59 crc kubenswrapper[4787]: I0127 07:53:59.981387 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-d44bj\" (UID: \"5bd6837c-65d0-40c3-9cbc-d05d4f1e93d0\") " pod="openshift-image-registry/image-registry-697d97f7c8-d44bj" Jan 27 07:53:59 crc kubenswrapper[4787]: E0127 07:53:59.981898 4787 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-27 07:54:00.48188167 +0000 UTC m=+146.134237162 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-d44bj" (UID: "5bd6837c-65d0-40c3-9cbc-d05d4f1e93d0") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 27 07:54:00 crc kubenswrapper[4787]: I0127 07:54:00.082725 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 27 07:54:00 crc kubenswrapper[4787]: E0127 07:54:00.082988 4787 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-27 07:54:00.582952782 +0000 UTC m=+146.235308264 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 27 07:54:00 crc kubenswrapper[4787]: I0127 07:54:00.083580 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-d44bj\" (UID: \"5bd6837c-65d0-40c3-9cbc-d05d4f1e93d0\") " pod="openshift-image-registry/image-registry-697d97f7c8-d44bj" Jan 27 07:54:00 crc kubenswrapper[4787]: E0127 07:54:00.084020 4787 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-27 07:54:00.58400196 +0000 UTC m=+146.236357452 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-d44bj" (UID: "5bd6837c-65d0-40c3-9cbc-d05d4f1e93d0") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 27 07:54:00 crc kubenswrapper[4787]: I0127 07:54:00.114436 4787 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-p29lm" Jan 27 07:54:00 crc kubenswrapper[4787]: I0127 07:54:00.114532 4787 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-p29lm" Jan 27 07:54:00 crc kubenswrapper[4787]: I0127 07:54:00.151607 4787 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-p29lm" Jan 27 07:54:00 crc kubenswrapper[4787]: I0127 07:54:00.186223 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 27 07:54:00 crc kubenswrapper[4787]: E0127 07:54:00.186401 4787 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-27 07:54:00.686358599 +0000 UTC m=+146.338714091 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 27 07:54:00 crc kubenswrapper[4787]: I0127 07:54:00.186672 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-d44bj\" (UID: \"5bd6837c-65d0-40c3-9cbc-d05d4f1e93d0\") " pod="openshift-image-registry/image-registry-697d97f7c8-d44bj" Jan 27 07:54:00 crc kubenswrapper[4787]: E0127 07:54:00.188540 4787 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-27 07:54:00.68851888 +0000 UTC m=+146.340874552 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-d44bj" (UID: "5bd6837c-65d0-40c3-9cbc-d05d4f1e93d0") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 27 07:54:00 crc kubenswrapper[4787]: I0127 07:54:00.287887 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 27 07:54:00 crc kubenswrapper[4787]: E0127 07:54:00.288212 4787 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-27 07:54:00.788170408 +0000 UTC m=+146.440525900 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 27 07:54:00 crc kubenswrapper[4787]: I0127 07:54:00.288457 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-d44bj\" (UID: \"5bd6837c-65d0-40c3-9cbc-d05d4f1e93d0\") " pod="openshift-image-registry/image-registry-697d97f7c8-d44bj" Jan 27 07:54:00 crc kubenswrapper[4787]: E0127 07:54:00.288984 4787 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-27 07:54:00.788967398 +0000 UTC m=+146.441322890 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-d44bj" (UID: "5bd6837c-65d0-40c3-9cbc-d05d4f1e93d0") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 27 07:54:00 crc kubenswrapper[4787]: I0127 07:54:00.389716 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 27 07:54:00 crc kubenswrapper[4787]: E0127 07:54:00.389897 4787 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-27 07:54:00.889870263 +0000 UTC m=+146.542225755 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 27 07:54:00 crc kubenswrapper[4787]: I0127 07:54:00.390464 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-d44bj\" (UID: \"5bd6837c-65d0-40c3-9cbc-d05d4f1e93d0\") " pod="openshift-image-registry/image-registry-697d97f7c8-d44bj" Jan 27 07:54:00 crc kubenswrapper[4787]: E0127 07:54:00.391000 4787 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-27 07:54:00.890980844 +0000 UTC m=+146.543336336 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-d44bj" (UID: "5bd6837c-65d0-40c3-9cbc-d05d4f1e93d0") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 27 07:54:00 crc kubenswrapper[4787]: I0127 07:54:00.492102 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 27 07:54:00 crc kubenswrapper[4787]: E0127 07:54:00.492324 4787 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-27 07:54:00.992303524 +0000 UTC m=+146.644659016 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 27 07:54:00 crc kubenswrapper[4787]: I0127 07:54:00.492815 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-d44bj\" (UID: \"5bd6837c-65d0-40c3-9cbc-d05d4f1e93d0\") " pod="openshift-image-registry/image-registry-697d97f7c8-d44bj" Jan 27 07:54:00 crc kubenswrapper[4787]: E0127 07:54:00.493619 4787 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-27 07:54:00.993580802 +0000 UTC m=+146.645936464 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-d44bj" (UID: "5bd6837c-65d0-40c3-9cbc-d05d4f1e93d0") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 27 07:54:00 crc kubenswrapper[4787]: I0127 07:54:00.594081 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 27 07:54:00 crc kubenswrapper[4787]: E0127 07:54:00.594275 4787 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-27 07:54:01.094245648 +0000 UTC m=+146.746601140 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 27 07:54:00 crc kubenswrapper[4787]: I0127 07:54:00.594439 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-d44bj\" (UID: \"5bd6837c-65d0-40c3-9cbc-d05d4f1e93d0\") " pod="openshift-image-registry/image-registry-697d97f7c8-d44bj" Jan 27 07:54:00 crc kubenswrapper[4787]: E0127 07:54:00.594873 4787 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-27 07:54:01.09485614 +0000 UTC m=+146.747211632 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-d44bj" (UID: "5bd6837c-65d0-40c3-9cbc-d05d4f1e93d0") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 27 07:54:00 crc kubenswrapper[4787]: I0127 07:54:00.604767 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-h5d4g" event={"ID":"2e88d48e-6302-4a84-90a7-69446be90e4a","Type":"ContainerStarted","Data":"380d74a932668a4ea80abdd7fc668dad563b863ffa1063b090e363f00e977eef"} Jan 27 07:54:00 crc kubenswrapper[4787]: I0127 07:54:00.606312 4787 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-h5d4g" Jan 27 07:54:00 crc kubenswrapper[4787]: I0127 07:54:00.607922 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-kqzdm" event={"ID":"082e51f2-7ac3-4111-a40d-eb8498db9153","Type":"ContainerStarted","Data":"e34965f2d6255a1e1c7d97589f31370ca5d0fbe322d30e475ba8e52245af1f1f"} Jan 27 07:54:00 crc kubenswrapper[4787]: I0127 07:54:00.618781 4787 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-h5d4g container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.36:8080/healthz\": dial tcp 10.217.0.36:8080: connect: connection refused" start-of-body= Jan 27 07:54:00 crc kubenswrapper[4787]: I0127 07:54:00.618835 4787 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-h5d4g" podUID="2e88d48e-6302-4a84-90a7-69446be90e4a" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.36:8080/healthz\": dial tcp 10.217.0.36:8080: connect: connection refused" Jan 27 07:54:00 crc kubenswrapper[4787]: I0127 07:54:00.633803 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-vl4tj" 
event={"ID":"c7ed2e1c-c815-45e4-a5a1-e9006de2e8b3","Type":"ContainerStarted","Data":"318390bc28506e850375d5d6709217fc4fe4d18dd8f418be899c4b70810854bf"} Jan 27 07:54:00 crc kubenswrapper[4787]: I0127 07:54:00.658761 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-d9n62" event={"ID":"52652793-dcbd-4568-a32e-b727818c61a8","Type":"ContainerStarted","Data":"859f29a12886d96f6a5ad7a2e2b0d3affaf1e1e7dc80936a6ac1f321e4160902"} Jan 27 07:54:00 crc kubenswrapper[4787]: I0127 07:54:00.669921 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-wv4z7" event={"ID":"c1a7c228-ae7f-44c7-98df-78bc3949c528","Type":"ContainerStarted","Data":"3d53cdec71d4e98b1fcd5b22da98aa602f9db421f18576ea91c5b9909c09ca0b"} Jan 27 07:54:00 crc kubenswrapper[4787]: I0127 07:54:00.678050 4787 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-h5d4g" podStartSLOduration=121.678023556 podStartE2EDuration="2m1.678023556s" podCreationTimestamp="2026-01-27 07:51:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-27 07:54:00.655148234 +0000 UTC m=+146.307503746" watchObservedRunningTime="2026-01-27 07:54:00.678023556 +0000 UTC m=+146.330379048" Jan 27 07:54:00 crc kubenswrapper[4787]: I0127 07:54:00.695953 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 27 07:54:00 crc kubenswrapper[4787]: E0127 07:54:00.697575 4787 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-27 07:54:01.197511811 +0000 UTC m=+146.849867373 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 27 07:54:00 crc kubenswrapper[4787]: I0127 07:54:00.726446 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-ctv89" event={"ID":"af87df13-9164-4e5e-99d4-5cbd9dfe80a5","Type":"ContainerStarted","Data":"535dd7c269fc6359bf8e3595d1b63afe2342457c4c43110041049e99fec62a40"} Jan 27 07:54:00 crc kubenswrapper[4787]: I0127 07:54:00.727234 4787 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console-operator/console-operator-58897d9998-ctv89" Jan 27 07:54:00 crc kubenswrapper[4787]: I0127 07:54:00.738010 4787 patch_prober.go:28] interesting pod/console-operator-58897d9998-ctv89 container/console-operator namespace/openshift-console-operator: Readiness probe status=failure output="Get \"https://10.217.0.16:8443/readyz\": dial tcp 10.217.0.16:8443: connect: connection refused" start-of-body= Jan 27 07:54:00 crc kubenswrapper[4787]: I0127 07:54:00.738237 4787 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console-operator/console-operator-58897d9998-ctv89" podUID="af87df13-9164-4e5e-99d4-5cbd9dfe80a5" containerName="console-operator" probeResult="failure" output="Get \"https://10.217.0.16:8443/readyz\": dial tcp 10.217.0.16:8443: connect: connection refused" Jan 27 07:54:00 crc kubenswrapper[4787]: I0127 07:54:00.763866 4787 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-d9n62" podStartSLOduration=121.763839639 podStartE2EDuration="2m1.763839639s" podCreationTimestamp="2026-01-27 07:51:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-27 07:54:00.68109813 +0000 UTC m=+146.333453622" watchObservedRunningTime="2026-01-27 07:54:00.763839639 +0000 UTC m=+146.416195131" Jan 27 07:54:00 crc kubenswrapper[4787]: I0127 07:54:00.778361 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-hn2j2" event={"ID":"a1672707-9776-4274-9bdd-4f1dabf83038","Type":"ContainerStarted","Data":"1f79ff8a49a9e9948d2591fcf86582b17234487ec76130430371f8fb8eeb10ff"} Jan 27 07:54:00 crc kubenswrapper[4787]: I0127 07:54:00.785887 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-q44w4" event={"ID":"7c9507a4-925b-418e-b824-f338cd69a66e","Type":"ContainerStarted","Data":"5f8513655a98fd8edfda812d7cb333015d5fac538cc236c7d0655eba4136e1b0"} Jan 27 07:54:00 crc kubenswrapper[4787]: I0127 07:54:00.792741 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-mwnxd" event={"ID":"f72f30c1-aeab-47d1-b353-5ea33af8eb6b","Type":"ContainerStarted","Data":"c39bdf5bbcb945f4a4464c0c5e574578ec8fec6904d0e3147e0c0840df9c02b1"} Jan 27 07:54:00 crc kubenswrapper[4787]: I0127 07:54:00.794431 4787 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openshift-console-operator/console-operator-58897d9998-ctv89" podStartSLOduration=121.794414057 podStartE2EDuration="2m1.794414057s" podCreationTimestamp="2026-01-27 07:51:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-27 07:54:00.762821221 +0000 UTC m=+146.415176723" watchObservedRunningTime="2026-01-27 07:54:00.794414057 +0000 UTC m=+146.446769549" Jan 27 07:54:00 crc kubenswrapper[4787]: I0127 07:54:00.795664 4787 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca/service-ca-9c57cc56f-crvsg" podStartSLOduration=121.795657263 podStartE2EDuration="2m1.795657263s" podCreationTimestamp="2026-01-27 07:51:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-27 07:54:00.794579683 +0000 UTC m=+146.446935185" watchObservedRunningTime="2026-01-27 07:54:00.795657263 +0000 UTC m=+146.448012755" Jan 27 07:54:00 crc kubenswrapper[4787]: I0127 07:54:00.800034 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-d44bj\" (UID: \"5bd6837c-65d0-40c3-9cbc-d05d4f1e93d0\") " pod="openshift-image-registry/image-registry-697d97f7c8-d44bj" Jan 27 07:54:00 crc kubenswrapper[4787]: E0127 07:54:00.810441 4787 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-27 07:54:01.310401981 +0000 UTC m=+146.962757463 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-d44bj" (UID: "5bd6837c-65d0-40c3-9cbc-d05d4f1e93d0") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 27 07:54:00 crc kubenswrapper[4787]: I0127 07:54:00.849539 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-tfh8x" event={"ID":"287286a8-80b2-4c95-948d-a096153d8e51","Type":"ContainerStarted","Data":"a52b1df0652ff9b38cd15dbf0bd00fd8fd9e3424531fd60b0f809e68cfd4499a"} Jan 27 07:54:00 crc kubenswrapper[4787]: I0127 07:54:00.880331 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-cqdk9" event={"ID":"2572ea5c-d57f-4bbc-b7a8-a72fcd4cced5","Type":"ContainerStarted","Data":"31367b02d14f1a4db40e7164f0b2be9cfe37b9fc8e6babc123bc1322f1a52842"} Jan 27 07:54:00 crc kubenswrapper[4787]: I0127 07:54:00.916037 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 27 07:54:00 crc kubenswrapper[4787]: E0127 07:54:00.917423 4787 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-27 07:54:01.417404584 +0000 UTC m=+147.069760076 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 27 07:54:00 crc kubenswrapper[4787]: I0127 07:54:00.923988 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-jzdds" event={"ID":"5f4a2300-d512-490d-a876-6ad03f0c2f31","Type":"ContainerStarted","Data":"f177590393476ec974ed7a7d8600fdcd3567bffcaa8920db4a6320af2a86a07e"} Jan 27 07:54:00 crc kubenswrapper[4787]: I0127 07:54:00.925975 4787 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-cqdk9" podStartSLOduration=121.925958762 podStartE2EDuration="2m1.925958762s" podCreationTimestamp="2026-01-27 07:51:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-27 07:54:00.924411744 +0000 UTC m=+146.576767236" watchObservedRunningTime="2026-01-27 07:54:00.925958762 +0000 UTC m=+146.578314254" Jan 27 07:54:00 crc kubenswrapper[4787]: I0127 07:54:00.927469 4787 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-tfh8x" podStartSLOduration=122.927459428 podStartE2EDuration="2m2.927459428s" podCreationTimestamp="2026-01-27 07:51:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-27 07:54:00.879852096 +0000 UTC m=+146.532207588" watchObservedRunningTime="2026-01-27 07:54:00.927459428 +0000 UTC m=+146.579814920" Jan 27 07:54:00 crc kubenswrapper[4787]: I0127 07:54:00.936046 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-znxn2" event={"ID":"9ce2a24d-e7e3-4c93-a46a-e914e248d6ae","Type":"ContainerStarted","Data":"a48b707d16db1e654abc9e879479b6ada09c2e0a4a4fe8b3fef680c656651ce9"} Jan 27 07:54:00 crc kubenswrapper[4787]: I0127 07:54:00.948345 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-m7czl" event={"ID":"cd10b89a-565a-4ae6-b7dd-4bac183ece8c","Type":"ContainerStarted","Data":"9327b8bbc5ad1982bdfd7b430fa46b49d3679836617e29dcb3e639a47d21ece5"} Jan 27 07:54:00 crc kubenswrapper[4787]: I0127 07:54:00.966233 4787 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress/router-default-5444994796-jzdds" podStartSLOduration=121.966197638 podStartE2EDuration="2m1.966197638s" podCreationTimestamp="2026-01-27 07:51:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-27 07:54:00.958370378 +0000 UTC m=+146.610725870" watchObservedRunningTime="2026-01-27 07:54:00.966197638 +0000 UTC m=+146.618553140" Jan 27 07:54:00 crc kubenswrapper[4787]: I0127 07:54:00.985465 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-j6mnr" 
event={"ID":"8d0e8826-188e-474f-9d5b-848886f95d1d","Type":"ContainerStarted","Data":"22fdd5365b93b4f5204d87d1a4df2fd4735d1486142fdb0ee41ec9aa76c418be"} Jan 27 07:54:00 crc kubenswrapper[4787]: I0127 07:54:00.985886 4787 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-canary/ingress-canary-znxn2" podStartSLOduration=6.98585293 podStartE2EDuration="6.98585293s" podCreationTimestamp="2026-01-27 07:53:54 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-27 07:54:00.985217407 +0000 UTC m=+146.637572909" watchObservedRunningTime="2026-01-27 07:54:00.98585293 +0000 UTC m=+146.638208422" Jan 27 07:54:01 crc kubenswrapper[4787]: I0127 07:54:01.001812 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-t6xzm" event={"ID":"ad35acfc-9b56-41e7-87bd-b43f5c006dd5","Type":"ContainerStarted","Data":"cd69ef4b0cf298876502ab6c4faf5bb19032d85098576b40e80ece77086b0042"} Jan 27 07:54:01 crc kubenswrapper[4787]: I0127 07:54:01.015833 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-cgsgg" event={"ID":"66eeb178-2611-4361-962c-a399b1f243b0","Type":"ContainerStarted","Data":"d8dbb1a88704c4745c58d07ffc23c2222317fe5d4738aa3a87fd2d0f87d7b995"} Jan 27 07:54:01 crc kubenswrapper[4787]: I0127 07:54:01.018625 4787 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-cgsgg" Jan 27 07:54:01 crc kubenswrapper[4787]: I0127 07:54:01.019376 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-d44bj\" (UID: \"5bd6837c-65d0-40c3-9cbc-d05d4f1e93d0\") " pod="openshift-image-registry/image-registry-697d97f7c8-d44bj" Jan 27 07:54:01 crc kubenswrapper[4787]: E0127 07:54:01.022784 4787 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-27 07:54:01.522766214 +0000 UTC m=+147.175121706 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-d44bj" (UID: "5bd6837c-65d0-40c3-9cbc-d05d4f1e93d0") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 27 07:54:01 crc kubenswrapper[4787]: I0127 07:54:01.025339 4787 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-j6mnr" podStartSLOduration=122.025302719 podStartE2EDuration="2m2.025302719s" podCreationTimestamp="2026-01-27 07:51:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-27 07:54:01.016398937 +0000 UTC m=+146.668754429" watchObservedRunningTime="2026-01-27 07:54:01.025302719 +0000 UTC m=+146.677658211" Jan 27 07:54:01 crc kubenswrapper[4787]: I0127 07:54:01.030957 4787 patch_prober.go:28] interesting pod/catalog-operator-68c6474976-cgsgg container/catalog-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.27:8443/healthz\": dial tcp 10.217.0.27:8443: connect: connection refused" start-of-body= Jan 27 07:54:01 crc kubenswrapper[4787]: I0127 07:54:01.031028 4787 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-cgsgg" podUID="66eeb178-2611-4361-962c-a399b1f243b0" containerName="catalog-operator" probeResult="failure" output="Get \"https://10.217.0.27:8443/healthz\": dial tcp 10.217.0.27:8443: connect: connection refused" Jan 27 07:54:01 crc kubenswrapper[4787]: I0127 07:54:01.045780 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-4xj5z" event={"ID":"4c622447-70d1-4b27-b09c-6fa6402f632c","Type":"ContainerStarted","Data":"c23a04a6732dc9cb95ffef73966fcc9a98e38e2ce2c0eb6cd3ab353422b67748"} Jan 27 07:54:01 crc kubenswrapper[4787]: I0127 07:54:01.046477 4787 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-4xj5z" Jan 27 07:54:01 crc kubenswrapper[4787]: I0127 07:54:01.084101 4787 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-cgsgg" podStartSLOduration=122.084068346 podStartE2EDuration="2m2.084068346s" podCreationTimestamp="2026-01-27 07:51:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-27 07:54:01.069880638 +0000 UTC m=+146.722236130" watchObservedRunningTime="2026-01-27 07:54:01.084068346 +0000 UTC m=+146.736423838" Jan 27 07:54:01 crc kubenswrapper[4787]: I0127 07:54:01.102286 4787 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-4xj5z" podStartSLOduration=122.102256812 podStartE2EDuration="2m2.102256812s" podCreationTimestamp="2026-01-27 07:51:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-27 07:54:01.095992379 +0000 UTC m=+146.748347881" 
watchObservedRunningTime="2026-01-27 07:54:01.102256812 +0000 UTC m=+146.754612304" Jan 27 07:54:01 crc kubenswrapper[4787]: I0127 07:54:01.104932 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-9xlcr" event={"ID":"0260c856-7cbc-4fdd-887e-5b2403c05e04","Type":"ContainerStarted","Data":"d1e12cb428e4881ce482a6b3479d196778864959f4a6a6c9df83d10d4a90cbf0"} Jan 27 07:54:01 crc kubenswrapper[4787]: I0127 07:54:01.106045 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-zgw8m" event={"ID":"3b9018b2-8e63-4c80-8c71-cc9fa7ddb853","Type":"ContainerStarted","Data":"31bee4a2b62aeaca94b08e5898d5f668df4b7ec70d2def20eeb97ebd083f3ec3"} Jan 27 07:54:01 crc kubenswrapper[4787]: I0127 07:54:01.122032 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 27 07:54:01 crc kubenswrapper[4787]: I0127 07:54:01.123318 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-rkzlx" event={"ID":"8aecac31-d20f-4725-b563-b66c9288769d","Type":"ContainerStarted","Data":"fcee71dd80f35d712ce58fb88f374f8864ba0291f70a83880cf45673e0387f68"} Jan 27 07:54:01 crc kubenswrapper[4787]: E0127 07:54:01.124782 4787 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-27 07:54:01.624690727 +0000 UTC m=+147.277046229 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 27 07:54:01 crc kubenswrapper[4787]: I0127 07:54:01.127775 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-mnzjr" event={"ID":"f063d15e-fe05-46d3-8304-72da67594ea6","Type":"ContainerStarted","Data":"5a4e6084c8174d166cda259d0d40f41eecd29f190246bde8d4d400592447021d"} Jan 27 07:54:01 crc kubenswrapper[4787]: I0127 07:54:01.130949 4787 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-server-9xlcr" podStartSLOduration=7.130921599 podStartE2EDuration="7.130921599s" podCreationTimestamp="2026-01-27 07:53:54 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-27 07:54:01.13013314 +0000 UTC m=+146.782488632" watchObservedRunningTime="2026-01-27 07:54:01.130921599 +0000 UTC m=+146.783277091" Jan 27 07:54:01 crc kubenswrapper[4787]: I0127 07:54:01.162534 4787 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-zgw8m" podStartSLOduration=122.162504414 podStartE2EDuration="2m2.162504414s" podCreationTimestamp="2026-01-27 07:51:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-27 07:54:01.159083236 +0000 UTC m=+146.811438728" watchObservedRunningTime="2026-01-27 07:54:01.162504414 +0000 UTC m=+146.814859916" Jan 27 07:54:01 crc kubenswrapper[4787]: I0127 07:54:01.182010 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-g44f6" event={"ID":"ca4c081d-896c-4cc8-9656-59364376de35","Type":"ContainerStarted","Data":"18d9ef1e7d3e588a5d939868f7c0a651f90760ac04dbbd15cc83a8ed490076a9"} Jan 27 07:54:01 crc kubenswrapper[4787]: I0127 07:54:01.183445 4787 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-879f6c89f-g44f6" Jan 27 07:54:01 crc kubenswrapper[4787]: I0127 07:54:01.209507 4787 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd-operator/etcd-operator-b45778765-mnzjr" podStartSLOduration=122.209466881 podStartE2EDuration="2m2.209466881s" podCreationTimestamp="2026-01-27 07:51:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-27 07:54:01.19463122 +0000 UTC m=+146.846986722" watchObservedRunningTime="2026-01-27 07:54:01.209466881 +0000 UTC m=+146.861822573" Jan 27 07:54:01 crc kubenswrapper[4787]: I0127 07:54:01.210173 4787 patch_prober.go:28] interesting pod/controller-manager-879f6c89f-g44f6 container/controller-manager namespace/openshift-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.13:8443/healthz\": dial tcp 10.217.0.13:8443: connect: connection refused" start-of-body= Jan 27 07:54:01 crc kubenswrapper[4787]: I0127 07:54:01.210358 4787 
prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-controller-manager/controller-manager-879f6c89f-g44f6" podUID="ca4c081d-896c-4cc8-9656-59364376de35" containerName="controller-manager" probeResult="failure" output="Get \"https://10.217.0.13:8443/healthz\": dial tcp 10.217.0.13:8443: connect: connection refused" Jan 27 07:54:01 crc kubenswrapper[4787]: I0127 07:54:01.224134 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-d44bj\" (UID: \"5bd6837c-65d0-40c3-9cbc-d05d4f1e93d0\") " pod="openshift-image-registry/image-registry-697d97f7c8-d44bj" Jan 27 07:54:01 crc kubenswrapper[4787]: E0127 07:54:01.226491 4787 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-27 07:54:01.726473674 +0000 UTC m=+147.378829156 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-d44bj" (UID: "5bd6837c-65d0-40c3-9cbc-d05d4f1e93d0") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 27 07:54:01 crc kubenswrapper[4787]: I0127 07:54:01.227517 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29491665-p955p" event={"ID":"152af819-6586-4345-b2ef-cb8ad845a6b1","Type":"ContainerStarted","Data":"4bba20fc10f4125f9489dd4dcdc6c569bd57f97297dffe6c9dba0e5f759c10ea"} Jan 27 07:54:01 crc kubenswrapper[4787]: I0127 07:54:01.234016 4787 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-879f6c89f-g44f6" podStartSLOduration=122.233988014 podStartE2EDuration="2m2.233988014s" podCreationTimestamp="2026-01-27 07:51:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-27 07:54:01.232190967 +0000 UTC m=+146.884546459" watchObservedRunningTime="2026-01-27 07:54:01.233988014 +0000 UTC m=+146.886343516" Jan 27 07:54:01 crc kubenswrapper[4787]: I0127 07:54:01.255152 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-tx6fd" event={"ID":"7307207b-fd57-4efc-807b-7e0664873f73","Type":"ContainerStarted","Data":"f0e097bbddf99a5cd749191fafe89c657d9a183c594c798eb00ebbd8cd926a52"} Jan 27 07:54:01 crc kubenswrapper[4787]: I0127 07:54:01.259989 4787 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29491665-p955p" podStartSLOduration=122.259970771 podStartE2EDuration="2m2.259970771s" podCreationTimestamp="2026-01-27 07:51:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-27 07:54:01.258249857 +0000 UTC m=+146.910605369" watchObservedRunningTime="2026-01-27 07:54:01.259970771 +0000 UTC m=+146.912326263" Jan 27 07:54:01 crc kubenswrapper[4787]: I0127 07:54:01.279739 4787 
generic.go:334] "Generic (PLEG): container finished" podID="ba0745e4-7610-44e2-9a65-cd3875393d64" containerID="31ba9cc692eb702ff1a9bfd4d9a7e97905de373f690545545a78b36468be4e0f" exitCode=0 Jan 27 07:54:01 crc kubenswrapper[4787]: I0127 07:54:01.279859 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-zkx8b" event={"ID":"ba0745e4-7610-44e2-9a65-cd3875393d64","Type":"ContainerDied","Data":"31ba9cc692eb702ff1a9bfd4d9a7e97905de373f690545545a78b36468be4e0f"} Jan 27 07:54:01 crc kubenswrapper[4787]: I0127 07:54:01.311389 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-vm6dc" event={"ID":"320d8560-54f7-4505-aa98-5beb6c45505e","Type":"ContainerStarted","Data":"45f361e64b8125955ab791eeacbf8e8dc3d2aa01e1f798b908cbd3a73c38ec83"} Jan 27 07:54:01 crc kubenswrapper[4787]: I0127 07:54:01.312179 4787 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-vm6dc" Jan 27 07:54:01 crc kubenswrapper[4787]: I0127 07:54:01.319746 4787 patch_prober.go:28] interesting pod/olm-operator-6b444d44fb-vm6dc container/olm-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.26:8443/healthz\": dial tcp 10.217.0.26:8443: connect: connection refused" start-of-body= Jan 27 07:54:01 crc kubenswrapper[4787]: I0127 07:54:01.320082 4787 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-vm6dc" podUID="320d8560-54f7-4505-aa98-5beb6c45505e" containerName="olm-operator" probeResult="failure" output="Get \"https://10.217.0.26:8443/healthz\": dial tcp 10.217.0.26:8443: connect: connection refused" Jan 27 07:54:01 crc kubenswrapper[4787]: I0127 07:54:01.322788 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-ln7rp" event={"ID":"0a3473c0-d321-4be8-860f-bd51210cb58f","Type":"ContainerStarted","Data":"07e49929be2877ba22a7f7942abf3e5f479a630fbb91b44620c30be0145e9051"} Jan 27 07:54:01 crc kubenswrapper[4787]: I0127 07:54:01.326909 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 27 07:54:01 crc kubenswrapper[4787]: E0127 07:54:01.328692 4787 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-27 07:54:01.828671367 +0000 UTC m=+147.481026859 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 27 07:54:01 crc kubenswrapper[4787]: I0127 07:54:01.376368 4787 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-vm6dc" podStartSLOduration=122.376349891 podStartE2EDuration="2m2.376349891s" podCreationTimestamp="2026-01-27 07:51:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-27 07:54:01.361303842 +0000 UTC m=+147.013659344" watchObservedRunningTime="2026-01-27 07:54:01.376349891 +0000 UTC m=+147.028705383" Jan 27 07:54:01 crc kubenswrapper[4787]: I0127 07:54:01.383070 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-h4cr4" event={"ID":"be571415-6ab0-4b8f-a971-3225465af110","Type":"ContainerStarted","Data":"9b0831767f55750a9e2555383b80711ffdb05dab1ed7dbc48f8a84a58f3ad26e"} Jan 27 07:54:01 crc kubenswrapper[4787]: I0127 07:54:01.385867 4787 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/downloads-7954f5f757-xwx4w" Jan 27 07:54:01 crc kubenswrapper[4787]: I0127 07:54:01.400457 4787 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-p29lm" Jan 27 07:54:01 crc kubenswrapper[4787]: I0127 07:54:01.400758 4787 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-558db77b4-wdppr" Jan 27 07:54:01 crc kubenswrapper[4787]: I0127 07:54:01.428737 4787 patch_prober.go:28] interesting pod/downloads-7954f5f757-xwx4w container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.15:8080/\": dial tcp 10.217.0.15:8080: connect: connection refused" start-of-body= Jan 27 07:54:01 crc kubenswrapper[4787]: I0127 07:54:01.428819 4787 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-xwx4w" podUID="301e3f1a-19c5-47a7-b85d-81676098f971" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.15:8080/\": dial tcp 10.217.0.15:8080: connect: connection refused" Jan 27 07:54:01 crc kubenswrapper[4787]: I0127 07:54:01.428842 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-d44bj\" (UID: \"5bd6837c-65d0-40c3-9cbc-d05d4f1e93d0\") " pod="openshift-image-registry/image-registry-697d97f7c8-d44bj" Jan 27 07:54:01 crc kubenswrapper[4787]: E0127 07:54:01.430317 4787 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-27 07:54:01.930299199 +0000 UTC m=+147.582654681 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-d44bj" (UID: "5bd6837c-65d0-40c3-9cbc-d05d4f1e93d0") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 27 07:54:01 crc kubenswrapper[4787]: I0127 07:54:01.531270 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 27 07:54:01 crc kubenswrapper[4787]: E0127 07:54:01.535009 4787 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-27 07:54:02.034950604 +0000 UTC m=+147.687306096 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 27 07:54:01 crc kubenswrapper[4787]: I0127 07:54:01.583772 4787 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-ln7rp" podStartSLOduration=122.58374808 podStartE2EDuration="2m2.58374808s" podCreationTimestamp="2026-01-27 07:51:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-27 07:54:01.429660945 +0000 UTC m=+147.082016457" watchObservedRunningTime="2026-01-27 07:54:01.58374808 +0000 UTC m=+147.236103572" Jan 27 07:54:01 crc kubenswrapper[4787]: I0127 07:54:01.635353 4787 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-h4cr4" podStartSLOduration=122.635318998 podStartE2EDuration="2m2.635318998s" podCreationTimestamp="2026-01-27 07:51:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-27 07:54:01.634342602 +0000 UTC m=+147.286698104" watchObservedRunningTime="2026-01-27 07:54:01.635318998 +0000 UTC m=+147.287674510" Jan 27 07:54:01 crc kubenswrapper[4787]: I0127 07:54:01.635390 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-d44bj\" (UID: \"5bd6837c-65d0-40c3-9cbc-d05d4f1e93d0\") " pod="openshift-image-registry/image-registry-697d97f7c8-d44bj" Jan 27 07:54:01 crc kubenswrapper[4787]: E0127 07:54:01.635730 4787 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-27 07:54:02.135716904 +0000 UTC m=+147.788072396 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-d44bj" (UID: "5bd6837c-65d0-40c3-9cbc-d05d4f1e93d0") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 27 07:54:01 crc kubenswrapper[4787]: I0127 07:54:01.712819 4787 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-ingress/router-default-5444994796-jzdds" Jan 27 07:54:01 crc kubenswrapper[4787]: I0127 07:54:01.742143 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 27 07:54:01 crc kubenswrapper[4787]: E0127 07:54:01.742513 4787 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-27 07:54:02.242495077 +0000 UTC m=+147.894850569 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 27 07:54:01 crc kubenswrapper[4787]: I0127 07:54:01.742764 4787 patch_prober.go:28] interesting pod/router-default-5444994796-jzdds container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 27 07:54:01 crc kubenswrapper[4787]: [-]has-synced failed: reason withheld Jan 27 07:54:01 crc kubenswrapper[4787]: [+]process-running ok Jan 27 07:54:01 crc kubenswrapper[4787]: healthz check failed Jan 27 07:54:01 crc kubenswrapper[4787]: I0127 07:54:01.742802 4787 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-jzdds" podUID="5f4a2300-d512-490d-a876-6ad03f0c2f31" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 27 07:54:01 crc kubenswrapper[4787]: I0127 07:54:01.846853 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-d44bj\" (UID: \"5bd6837c-65d0-40c3-9cbc-d05d4f1e93d0\") " pod="openshift-image-registry/image-registry-697d97f7c8-d44bj" Jan 27 07:54:01 crc kubenswrapper[4787]: E0127 07:54:01.847290 4787 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-27 07:54:02.347274416 +0000 UTC m=+147.999629908 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-d44bj" (UID: "5bd6837c-65d0-40c3-9cbc-d05d4f1e93d0") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 27 07:54:01 crc kubenswrapper[4787]: I0127 07:54:01.959588 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 27 07:54:01 crc kubenswrapper[4787]: E0127 07:54:01.960423 4787 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-27 07:54:02.460400126 +0000 UTC m=+148.112755608 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 27 07:54:02 crc kubenswrapper[4787]: I0127 07:54:02.061928 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-d44bj\" (UID: \"5bd6837c-65d0-40c3-9cbc-d05d4f1e93d0\") " pod="openshift-image-registry/image-registry-697d97f7c8-d44bj" Jan 27 07:54:02 crc kubenswrapper[4787]: E0127 07:54:02.062419 4787 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-27 07:54:02.56239163 +0000 UTC m=+148.214747302 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-d44bj" (UID: "5bd6837c-65d0-40c3-9cbc-d05d4f1e93d0") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 27 07:54:02 crc kubenswrapper[4787]: I0127 07:54:02.163132 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 27 07:54:02 crc kubenswrapper[4787]: E0127 07:54:02.163376 4787 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-27 07:54:02.663334766 +0000 UTC m=+148.315690268 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 27 07:54:02 crc kubenswrapper[4787]: I0127 07:54:02.163448 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-d44bj\" (UID: \"5bd6837c-65d0-40c3-9cbc-d05d4f1e93d0\") " pod="openshift-image-registry/image-registry-697d97f7c8-d44bj" Jan 27 07:54:02 crc kubenswrapper[4787]: E0127 07:54:02.163931 4787 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-27 07:54:02.663910889 +0000 UTC m=+148.316266541 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-d44bj" (UID: "5bd6837c-65d0-40c3-9cbc-d05d4f1e93d0") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 27 07:54:02 crc kubenswrapper[4787]: I0127 07:54:02.265623 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 27 07:54:02 crc kubenswrapper[4787]: E0127 07:54:02.265860 4787 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-27 07:54:02.765819591 +0000 UTC m=+148.418175083 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 27 07:54:02 crc kubenswrapper[4787]: I0127 07:54:02.266058 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-d44bj\" (UID: \"5bd6837c-65d0-40c3-9cbc-d05d4f1e93d0\") " pod="openshift-image-registry/image-registry-697d97f7c8-d44bj" Jan 27 07:54:02 crc kubenswrapper[4787]: E0127 07:54:02.267081 4787 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-27 07:54:02.767071498 +0000 UTC m=+148.419426990 (durationBeforeRetry 500ms). 
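Each failure above is immediately followed by a nestedpendingoperations entry that blocks the operation until a deadline about 500ms in the future (durationBeforeRetry 500ms); the reconciler then re-queues the volume on its next pass, which is why the same pair of messages recurs every few hundred milliseconds. A rough Go model of that per-operation retry gate (an illustration of the pattern, not the kubelet's nestedpendingoperations implementation):

    package main

    import (
        "fmt"
        "time"
    )

    // retryGate models "No retries permitted until <t> (durationBeforeRetry <d>)":
    // after a failure the operation is locked out until a deadline, and callers
    // that arrive earlier are rejected without doing any work.
    type retryGate struct {
        durationBeforeRetry time.Duration
        notBefore           time.Time
    }

    func (g *retryGate) markFailed(now time.Time) {
        g.notBefore = now.Add(g.durationBeforeRetry)
    }

    func (g *retryGate) allowed(now time.Time) bool {
        return !now.Before(g.notBefore)
    }

    func main() {
        g := &retryGate{durationBeforeRetry: 500 * time.Millisecond}
        failedAt := time.Now()
        g.markFailed(failedAt)

        fmt.Println("retry at +100ms allowed?", g.allowed(failedAt.Add(100*time.Millisecond))) // false
        fmt.Println("retry at +600ms allowed?", g.allowed(failedAt.Add(600*time.Millisecond))) // true
    }

The kubelet's real gate can also grow the delay on repeated failures, but in this stretch of the log it stays at the initial 500ms.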
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-d44bj" (UID: "5bd6837c-65d0-40c3-9cbc-d05d4f1e93d0") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 27 07:54:02 crc kubenswrapper[4787]: I0127 07:54:02.276769 4787 csr.go:261] certificate signing request csr-7nwqm is approved, waiting to be issued Jan 27 07:54:02 crc kubenswrapper[4787]: I0127 07:54:02.283069 4787 csr.go:257] certificate signing request csr-7nwqm is issued Jan 27 07:54:02 crc kubenswrapper[4787]: I0127 07:54:02.367757 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 27 07:54:02 crc kubenswrapper[4787]: E0127 07:54:02.367987 4787 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-27 07:54:02.867937921 +0000 UTC m=+148.520293403 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 27 07:54:02 crc kubenswrapper[4787]: I0127 07:54:02.368316 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-d44bj\" (UID: \"5bd6837c-65d0-40c3-9cbc-d05d4f1e93d0\") " pod="openshift-image-registry/image-registry-697d97f7c8-d44bj" Jan 27 07:54:02 crc kubenswrapper[4787]: E0127 07:54:02.368879 4787 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-27 07:54:02.868847524 +0000 UTC m=+148.521203016 (durationBeforeRetry 500ms). 
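Amid the volume errors, the kubelet's own certificate signing request goes through: csr-7nwqm is approved and, a few milliseconds later, issued. From a client's point of view, "approved, waiting to be issued" versus "issued" is simply whether status.certificate has been populated by the signer. A hedged client-go sketch of watching for that transition (the kubeconfig path and polling interval are assumptions for illustration; the CSR name is the one from the log):

    package main

    import (
        "context"
        "fmt"
        "time"

        metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
        "k8s.io/client-go/kubernetes"
        "k8s.io/client-go/tools/clientcmd"
    )

    func main() {
        // Assumed kubeconfig location; any client credentials able to read CSRs work.
        cfg, err := clientcmd.BuildConfigFromFlags("", "/var/lib/kubelet/kubeconfig")
        if err != nil {
            panic(err)
        }
        client := kubernetes.NewForConfigOrDie(cfg)

        for {
            csr, err := client.CertificatesV1().CertificateSigningRequests().Get(
                context.Background(), "csr-7nwqm", metav1.GetOptions{})
            if err != nil {
                panic(err)
            }
            if len(csr.Status.Certificate) > 0 {
                fmt.Println("certificate issued,", len(csr.Status.Certificate), "bytes")
                return
            }
            fmt.Println("approved but not yet issued, retrying")
            time.Sleep(200 * time.Millisecond)
        }
    }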
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-d44bj" (UID: "5bd6837c-65d0-40c3-9cbc-d05d4f1e93d0") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 27 07:54:02 crc kubenswrapper[4787]: I0127 07:54:02.392825 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-rkzlx" event={"ID":"8aecac31-d20f-4725-b563-b66c9288769d","Type":"ContainerStarted","Data":"6a6d83ddf1a9d8424c8026068dfc1e75d7527f2e2089ebb81cda023641b16d16"} Jan 27 07:54:02 crc kubenswrapper[4787]: I0127 07:54:02.392902 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-rkzlx" event={"ID":"8aecac31-d20f-4725-b563-b66c9288769d","Type":"ContainerStarted","Data":"ab402b294004f3bec36b053aea58449461b62a4671b44090382136dfa2271fdb"} Jan 27 07:54:02 crc kubenswrapper[4787]: I0127 07:54:02.393028 4787 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-dns/dns-default-rkzlx" Jan 27 07:54:02 crc kubenswrapper[4787]: I0127 07:54:02.394601 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-crvsg" event={"ID":"0c6406d2-db7c-458f-9c56-a640eee1f327","Type":"ContainerStarted","Data":"d7cb8e77e8378971a0bca8625d45c58d9ded641d783442f8f73e238b6ec2e2cf"} Jan 27 07:54:02 crc kubenswrapper[4787]: I0127 07:54:02.396824 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-t6xzm" event={"ID":"ad35acfc-9b56-41e7-87bd-b43f5c006dd5","Type":"ContainerStarted","Data":"32a67935827d89a4fe33cdc18c8aea74d2ebc04eb4a16cf95e3aba2ecb240dcf"} Jan 27 07:54:02 crc kubenswrapper[4787]: I0127 07:54:02.398404 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-q44w4" event={"ID":"7c9507a4-925b-418e-b824-f338cd69a66e","Type":"ContainerStarted","Data":"eb5bfc714559f49fe82f0eb2b8b18ce764d02898a035bcea361fbd63c963070a"} Jan 27 07:54:02 crc kubenswrapper[4787]: I0127 07:54:02.400132 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-mwnxd" event={"ID":"f72f30c1-aeab-47d1-b353-5ea33af8eb6b","Type":"ContainerStarted","Data":"d2024e5365119c1c772a3ed14c0f1b5d27c4be2cf481dc22f626eb9880a910f9"} Jan 27 07:54:02 crc kubenswrapper[4787]: I0127 07:54:02.402284 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-4xj5z" event={"ID":"4c622447-70d1-4b27-b09c-6fa6402f632c","Type":"ContainerStarted","Data":"ca9f99093e71b2e75799d6a27256efbb3ca8f03bde03a626d83023f28c42337b"} Jan 27 07:54:02 crc kubenswrapper[4787]: I0127 07:54:02.404253 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-wv4z7" event={"ID":"c1a7c228-ae7f-44c7-98df-78bc3949c528","Type":"ContainerStarted","Data":"41f28742f7a955e738de3212cf60da12058bfff516102231cc9b487be3c97589"} Jan 27 07:54:02 crc kubenswrapper[4787]: I0127 07:54:02.421975 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-tx6fd" 
event={"ID":"7307207b-fd57-4efc-807b-7e0664873f73","Type":"ContainerStarted","Data":"139405013ba5fbe4e056c0418bee55f402c7f745c43bba09a23828946c1df1c7"} Jan 27 07:54:02 crc kubenswrapper[4787]: I0127 07:54:02.422038 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-tx6fd" event={"ID":"7307207b-fd57-4efc-807b-7e0664873f73","Type":"ContainerStarted","Data":"07db64620c5044eefec1cbffd8ab58e833d6c27fcef66ba74a1688f0d59a79bf"} Jan 27 07:54:02 crc kubenswrapper[4787]: I0127 07:54:02.430118 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-8b5jc" event={"ID":"770c6bc0-a6c4-42b9-b21e-d1c36ed7b8bf","Type":"ContainerStarted","Data":"d89a1a358ac2147616620dc9bdcb5050c6771cdd9a3460252094290a0085ef2b"} Jan 27 07:54:02 crc kubenswrapper[4787]: I0127 07:54:02.453212 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-zkx8b" event={"ID":"ba0745e4-7610-44e2-9a65-cd3875393d64","Type":"ContainerStarted","Data":"9a9f2ac8b63c2c8caea215369cf38b58b23111f7fdcb958e05ae967809fb4ed6"} Jan 27 07:54:02 crc kubenswrapper[4787]: I0127 07:54:02.473365 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 27 07:54:02 crc kubenswrapper[4787]: E0127 07:54:02.475190 4787 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-27 07:54:02.975166141 +0000 UTC m=+148.627521633 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 27 07:54:02 crc kubenswrapper[4787]: I0127 07:54:02.473502 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-cgsgg" event={"ID":"66eeb178-2611-4361-962c-a399b1f243b0","Type":"ContainerStarted","Data":"916051aa46a52f6b2746a78401d84cfc2319af4bc4fd998eaaff2e7cfd697157"} Jan 27 07:54:02 crc kubenswrapper[4787]: I0127 07:54:02.496912 4787 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-cgsgg" Jan 27 07:54:02 crc kubenswrapper[4787]: I0127 07:54:02.525718 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-h4cr4" event={"ID":"be571415-6ab0-4b8f-a971-3225465af110","Type":"ContainerStarted","Data":"a2252d9642541c57f78b6477d9422d38fe03c9bffb7c7af7da4c29faf6ced816"} Jan 27 07:54:02 crc kubenswrapper[4787]: I0127 07:54:02.546305 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-vl4tj" event={"ID":"c7ed2e1c-c815-45e4-a5a1-e9006de2e8b3","Type":"ContainerStarted","Data":"193e26ff5142e27d793d997c1307ca5e20c75317341f426b60970d97236d7131"} Jan 27 07:54:02 crc kubenswrapper[4787]: I0127 07:54:02.548982 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-vm6dc" event={"ID":"320d8560-54f7-4505-aa98-5beb6c45505e","Type":"ContainerStarted","Data":"005924651a6cb53de22088169d28a2182550178445f46faf130801e0657a7561"} Jan 27 07:54:02 crc kubenswrapper[4787]: I0127 07:54:02.555683 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-m7czl" event={"ID":"cd10b89a-565a-4ae6-b7dd-4bac183ece8c","Type":"ContainerStarted","Data":"d88e5055131efaed5f914ca26f1117658cd8a05cbea4828aa1b8f40ed095fd20"} Jan 27 07:54:02 crc kubenswrapper[4787]: I0127 07:54:02.555835 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-m7czl" event={"ID":"cd10b89a-565a-4ae6-b7dd-4bac183ece8c","Type":"ContainerStarted","Data":"7c4a012248b8e756d9e2e998b4271a6b0121bb1a9060285dbcd916824736d91f"} Jan 27 07:54:02 crc kubenswrapper[4787]: I0127 07:54:02.556913 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-xf9dv" event={"ID":"7727572b-c246-4b22-b3eb-14275d3d25ee","Type":"ContainerStarted","Data":"e0605f364a944b73985438110f75c81b37eb4b5d1b3c1b80020368cd9a3e179a"} Jan 27 07:54:02 crc kubenswrapper[4787]: I0127 07:54:02.564864 4787 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-vm6dc" Jan 27 07:54:02 crc kubenswrapper[4787]: I0127 07:54:02.566361 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-cqdk9" 
event={"ID":"2572ea5c-d57f-4bbc-b7a8-a72fcd4cced5","Type":"ContainerStarted","Data":"def445cc1484f1ddcb4c2f6818d89a9fb3a97ebbbffef677652806f57ae5bc89"} Jan 27 07:54:02 crc kubenswrapper[4787]: I0127 07:54:02.570164 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-kqzdm" event={"ID":"082e51f2-7ac3-4111-a40d-eb8498db9153","Type":"ContainerStarted","Data":"38dfde05ce1a21d787aff575d63ca9743b7df7c4aef8dd76f2731118e18ecb9b"} Jan 27 07:54:02 crc kubenswrapper[4787]: I0127 07:54:02.575550 4787 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-kqzdm" Jan 27 07:54:02 crc kubenswrapper[4787]: I0127 07:54:02.576476 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-d44bj\" (UID: \"5bd6837c-65d0-40c3-9cbc-d05d4f1e93d0\") " pod="openshift-image-registry/image-registry-697d97f7c8-d44bj" Jan 27 07:54:02 crc kubenswrapper[4787]: E0127 07:54:02.576910 4787 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-27 07:54:03.076890246 +0000 UTC m=+148.729245908 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-d44bj" (UID: "5bd6837c-65d0-40c3-9cbc-d05d4f1e93d0") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 27 07:54:02 crc kubenswrapper[4787]: I0127 07:54:02.580316 4787 patch_prober.go:28] interesting pod/downloads-7954f5f757-xwx4w container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.15:8080/\": dial tcp 10.217.0.15:8080: connect: connection refused" start-of-body= Jan 27 07:54:02 crc kubenswrapper[4787]: I0127 07:54:02.580371 4787 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-xwx4w" podUID="301e3f1a-19c5-47a7-b85d-81676098f971" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.15:8080/\": dial tcp 10.217.0.15:8080: connect: connection refused" Jan 27 07:54:02 crc kubenswrapper[4787]: I0127 07:54:02.580753 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-znxn2" event={"ID":"9ce2a24d-e7e3-4c93-a46a-e914e248d6ae","Type":"ContainerStarted","Data":"99cddda84a9c7b6f27f72416db2d8539fb7928211fca37e7debd56afa5e1c9f7"} Jan 27 07:54:02 crc kubenswrapper[4787]: I0127 07:54:02.596115 4787 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-879f6c89f-g44f6" Jan 27 07:54:02 crc kubenswrapper[4787]: I0127 07:54:02.600299 4787 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console-operator/console-operator-58897d9998-ctv89" Jan 27 07:54:02 crc kubenswrapper[4787]: I0127 07:54:02.605904 4787 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" 
pod="openshift-config-operator/openshift-config-operator-7777fb866f-hn2j2" Jan 27 07:54:02 crc kubenswrapper[4787]: I0127 07:54:02.631938 4787 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-h5d4g" Jan 27 07:54:02 crc kubenswrapper[4787]: I0127 07:54:02.677602 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 27 07:54:02 crc kubenswrapper[4787]: E0127 07:54:02.677821 4787 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-27 07:54:03.177785791 +0000 UTC m=+148.830141283 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 27 07:54:02 crc kubenswrapper[4787]: I0127 07:54:02.680528 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-d44bj\" (UID: \"5bd6837c-65d0-40c3-9cbc-d05d4f1e93d0\") " pod="openshift-image-registry/image-registry-697d97f7c8-d44bj" Jan 27 07:54:02 crc kubenswrapper[4787]: E0127 07:54:02.683239 4787 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-27 07:54:03.183217053 +0000 UTC m=+148.835572725 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-d44bj" (UID: "5bd6837c-65d0-40c3-9cbc-d05d4f1e93d0") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 27 07:54:02 crc kubenswrapper[4787]: I0127 07:54:02.694496 4787 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-8b5jc" podStartSLOduration=123.694472042 podStartE2EDuration="2m3.694472042s" podCreationTimestamp="2026-01-27 07:51:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-27 07:54:02.691840164 +0000 UTC m=+148.344195656" watchObservedRunningTime="2026-01-27 07:54:02.694472042 +0000 UTC m=+148.346827534" Jan 27 07:54:02 crc kubenswrapper[4787]: I0127 07:54:02.695733 4787 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/dns-default-rkzlx" podStartSLOduration=8.695723719 podStartE2EDuration="8.695723719s" podCreationTimestamp="2026-01-27 07:53:54 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-27 07:54:02.493054226 +0000 UTC m=+148.145409718" watchObservedRunningTime="2026-01-27 07:54:02.695723719 +0000 UTC m=+148.348079211" Jan 27 07:54:02 crc kubenswrapper[4787]: I0127 07:54:02.723703 4787 patch_prober.go:28] interesting pod/router-default-5444994796-jzdds container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 27 07:54:02 crc kubenswrapper[4787]: [-]has-synced failed: reason withheld Jan 27 07:54:02 crc kubenswrapper[4787]: [+]process-running ok Jan 27 07:54:02 crc kubenswrapper[4787]: healthz check failed Jan 27 07:54:02 crc kubenswrapper[4787]: I0127 07:54:02.725035 4787 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-jzdds" podUID="5f4a2300-d512-490d-a876-6ad03f0c2f31" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 27 07:54:02 crc kubenswrapper[4787]: I0127 07:54:02.796520 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 27 07:54:02 crc kubenswrapper[4787]: E0127 07:54:02.796799 4787 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-27 07:54:03.296758018 +0000 UTC m=+148.949113510 (durationBeforeRetry 500ms). 
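Two kinds of probe failures are interleaved here: the openshift-console downloads pod's readiness probe gets "connection refused" because nothing is listening on 10.217.0.15:8080 yet, and the default router's startup probe gets an HTTP 500 from its health endpoint while its backends are still syncing ([-]backend-http, [-]has-synced). Both are plain HTTP GETs judged by connection success and status code, roughly like the sketch below (the 1s timeout is an assumption; the URL is the one quoted for the downloads probe):

    package main

    import (
        "fmt"
        "io"
        "net/http"
        "time"
    )

    // probe performs one HTTP check the way the failures above read: a connection
    // error or a non-2xx/3xx status both count as a probe failure.
    func probe(url string) error {
        client := &http.Client{Timeout: 1 * time.Second}
        resp, err := client.Get(url)
        if err != nil {
            return err // e.g. "connect: connection refused"
        }
        defer resp.Body.Close()
        body, _ := io.ReadAll(resp.Body)
        if resp.StatusCode < 200 || resp.StatusCode >= 400 {
            return fmt.Errorf("HTTP probe failed with statuscode: %d, body: %s", resp.StatusCode, body)
        }
        return nil
    }

    func main() {
        if err := probe("http://10.217.0.15:8080/"); err != nil {
            fmt.Println("probe failed:", err)
        } else {
            fmt.Println("probe ok")
        }
    }

A refused connection and a 5xx response are reported the same way; the kubelet keeps the pod not-ready (or, for a startup probe, not-started) and tries again on the probe's period.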
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 27 07:54:02 crc kubenswrapper[4787]: I0127 07:54:02.797754 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-d44bj\" (UID: \"5bd6837c-65d0-40c3-9cbc-d05d4f1e93d0\") " pod="openshift-image-registry/image-registry-697d97f7c8-d44bj" Jan 27 07:54:02 crc kubenswrapper[4787]: E0127 07:54:02.798267 4787 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-27 07:54:03.298247224 +0000 UTC m=+148.950602716 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-d44bj" (UID: "5bd6837c-65d0-40c3-9cbc-d05d4f1e93d0") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 27 07:54:02 crc kubenswrapper[4787]: I0127 07:54:02.830356 4787 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-q44w4" podStartSLOduration=124.830325917 podStartE2EDuration="2m4.830325917s" podCreationTimestamp="2026-01-27 07:51:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-27 07:54:02.830191433 +0000 UTC m=+148.482546935" watchObservedRunningTime="2026-01-27 07:54:02.830325917 +0000 UTC m=+148.482681409" Jan 27 07:54:02 crc kubenswrapper[4787]: I0127 07:54:02.901494 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 27 07:54:02 crc kubenswrapper[4787]: E0127 07:54:02.901918 4787 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-27 07:54:03.401894041 +0000 UTC m=+149.054249533 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 27 07:54:03 crc kubenswrapper[4787]: I0127 07:54:03.004332 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-d44bj\" (UID: \"5bd6837c-65d0-40c3-9cbc-d05d4f1e93d0\") " pod="openshift-image-registry/image-registry-697d97f7c8-d44bj" Jan 27 07:54:03 crc kubenswrapper[4787]: I0127 07:54:03.004382 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 27 07:54:03 crc kubenswrapper[4787]: I0127 07:54:03.004405 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 27 07:54:03 crc kubenswrapper[4787]: I0127 07:54:03.004431 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 27 07:54:03 crc kubenswrapper[4787]: I0127 07:54:03.004493 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 27 07:54:03 crc kubenswrapper[4787]: E0127 07:54:03.006688 4787 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-27 07:54:03.50666591 +0000 UTC m=+149.159021402 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-d44bj" (UID: "5bd6837c-65d0-40c3-9cbc-d05d4f1e93d0") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 27 07:54:03 crc kubenswrapper[4787]: I0127 07:54:03.008377 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 27 07:54:03 crc kubenswrapper[4787]: I0127 07:54:03.014535 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 27 07:54:03 crc kubenswrapper[4787]: I0127 07:54:03.016330 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 27 07:54:03 crc kubenswrapper[4787]: I0127 07:54:03.029116 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 27 07:54:03 crc kubenswrapper[4787]: I0127 07:54:03.097064 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 27 07:54:03 crc kubenswrapper[4787]: I0127 07:54:03.105775 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 27 07:54:03 crc kubenswrapper[4787]: E0127 07:54:03.106311 4787 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-27 07:54:03.606285746 +0000 UTC m=+149.258641238 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 27 07:54:03 crc kubenswrapper[4787]: I0127 07:54:03.108721 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 27 07:54:03 crc kubenswrapper[4787]: I0127 07:54:03.116250 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 27 07:54:03 crc kubenswrapper[4787]: I0127 07:54:03.127210 4787 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns-operator/dns-operator-744455d44c-t6xzm" podStartSLOduration=124.127179984 podStartE2EDuration="2m4.127179984s" podCreationTimestamp="2026-01-27 07:51:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-27 07:54:02.947954365 +0000 UTC m=+148.600309857" watchObservedRunningTime="2026-01-27 07:54:03.127179984 +0000 UTC m=+148.779535476" Jan 27 07:54:03 crc kubenswrapper[4787]: I0127 07:54:03.129025 4787 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-wv4z7" podStartSLOduration=124.129015552 podStartE2EDuration="2m4.129015552s" podCreationTimestamp="2026-01-27 07:51:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-27 07:54:03.12304035 +0000 UTC m=+148.775395842" watchObservedRunningTime="2026-01-27 07:54:03.129015552 +0000 UTC m=+148.781371044" Jan 27 07:54:03 crc kubenswrapper[4787]: I0127 07:54:03.193631 4787 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-admission-controller-857f4d67dd-tx6fd" podStartSLOduration=124.193608856 podStartE2EDuration="2m4.193608856s" podCreationTimestamp="2026-01-27 07:51:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-27 07:54:03.192423931 +0000 UTC m=+148.844779443" watchObservedRunningTime="2026-01-27 07:54:03.193608856 +0000 UTC m=+148.845964348" Jan 27 07:54:03 crc kubenswrapper[4787]: I0127 07:54:03.207147 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-d44bj\" (UID: \"5bd6837c-65d0-40c3-9cbc-d05d4f1e93d0\") " pod="openshift-image-registry/image-registry-697d97f7c8-d44bj" Jan 27 07:54:03 crc kubenswrapper[4787]: E0127 07:54:03.207526 4787 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-27 07:54:03.707513273 +0000 UTC m=+149.359868765 (durationBeforeRetry 500ms). 
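The pod_startup_latency_tracker entries are bookkeeping rather than errors: podStartSLOduration and podStartE2EDuration are simply the gap between the pod's creation timestamp and the time the kubelet observed it running, and the zero-value firstStartedPulling/lastFinishedPulling timestamps appear to mean no image pull time was recorded. Recomputing the dns-operator figure from the two timestamps quoted in that entry:

    package main

    import (
        "fmt"
        "time"
    )

    func main() {
        const layout = "2006-01-02 15:04:05 -0700 MST"
        // Timestamps quoted in the dns-operator-744455d44c-t6xzm entry above.
        created, err := time.Parse(layout, "2026-01-27 07:51:59 +0000 UTC")
        if err != nil {
            panic(err)
        }
        observed, err := time.Parse(layout, "2026-01-27 07:54:03.127179984 +0000 UTC")
        if err != nil {
            panic(err)
        }
        fmt.Println(observed.Sub(created)) // 2m4.127179984s, i.e. podStartSLOduration ~= 124.13s
    }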
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-d44bj" (UID: "5bd6837c-65d0-40c3-9cbc-d05d4f1e93d0") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 27 07:54:03 crc kubenswrapper[4787]: I0127 07:54:03.300776 4787 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2027-01-27 07:49:02 +0000 UTC, rotation deadline is 2026-11-11 08:42:07.78994776 +0000 UTC Jan 27 07:54:03 crc kubenswrapper[4787]: I0127 07:54:03.300869 4787 certificate_manager.go:356] kubernetes.io/kubelet-serving: Waiting 6912h48m4.489081643s for next certificate rotation Jan 27 07:54:03 crc kubenswrapper[4787]: I0127 07:54:03.308738 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 27 07:54:03 crc kubenswrapper[4787]: E0127 07:54:03.309662 4787 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-27 07:54:03.809634203 +0000 UTC m=+149.461989695 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 27 07:54:03 crc kubenswrapper[4787]: I0127 07:54:03.411242 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-d44bj\" (UID: \"5bd6837c-65d0-40c3-9cbc-d05d4f1e93d0\") " pod="openshift-image-registry/image-registry-697d97f7c8-d44bj" Jan 27 07:54:03 crc kubenswrapper[4787]: E0127 07:54:03.411647 4787 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-27 07:54:03.911633129 +0000 UTC m=+149.563988621 (durationBeforeRetry 500ms). 
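The certificate_manager entries log the kubelet-serving certificate's expiry (2027-01-27), the chosen rotation deadline (2026-11-11 08:42:07), and the wait until that deadline. The wait is just deadline minus now, about 288 days (6912h) plus 48 minutes; subtracting the logged wait from the logged deadline recovers the moment of the computation, inside the 07:54:03 second of the log line:

    package main

    import (
        "fmt"
        "time"
    )

    func main() {
        const layout = "2006-01-02 15:04:05 -0700 MST"
        // Values quoted in the certificate_manager entries above.
        deadline, err := time.Parse(layout, "2026-11-11 08:42:07.78994776 +0000 UTC")
        if err != nil {
            panic(err)
        }
        wait, err := time.ParseDuration("6912h48m4.489081643s")
        if err != nil {
            panic(err)
        }
        fmt.Println(deadline.Add(-wait)) // 2026-01-27 07:54:03.300866117 +0000 UTC
    }

A deadline roughly 79% of the way through the certificate's one-year validity is also consistent with the manager picking a jittered rotation point somewhere between 70% and 90% of the lifetime.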
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-d44bj" (UID: "5bd6837c-65d0-40c3-9cbc-d05d4f1e93d0") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 27 07:54:03 crc kubenswrapper[4787]: I0127 07:54:03.488794 4787 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca-operator/service-ca-operator-777779d784-xf9dv" podStartSLOduration=124.488770049 podStartE2EDuration="2m4.488770049s" podCreationTimestamp="2026-01-27 07:51:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-27 07:54:03.486198574 +0000 UTC m=+149.138554066" watchObservedRunningTime="2026-01-27 07:54:03.488770049 +0000 UTC m=+149.141125531" Jan 27 07:54:03 crc kubenswrapper[4787]: I0127 07:54:03.521055 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 27 07:54:03 crc kubenswrapper[4787]: E0127 07:54:03.521205 4787 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-27 07:54:04.021174955 +0000 UTC m=+149.673530447 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 27 07:54:03 crc kubenswrapper[4787]: I0127 07:54:03.521517 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-d44bj\" (UID: \"5bd6837c-65d0-40c3-9cbc-d05d4f1e93d0\") " pod="openshift-image-registry/image-registry-697d97f7c8-d44bj" Jan 27 07:54:03 crc kubenswrapper[4787]: E0127 07:54:03.522127 4787 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-27 07:54:04.022090019 +0000 UTC m=+149.674445511 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-d44bj" (UID: "5bd6837c-65d0-40c3-9cbc-d05d4f1e93d0") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 27 07:54:03 crc kubenswrapper[4787]: I0127 07:54:03.574136 4787 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-m7czl" podStartSLOduration=124.574113925 podStartE2EDuration="2m4.574113925s" podCreationTimestamp="2026-01-27 07:51:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-27 07:54:03.57207547 +0000 UTC m=+149.224430972" watchObservedRunningTime="2026-01-27 07:54:03.574113925 +0000 UTC m=+149.226469417" Jan 27 07:54:03 crc kubenswrapper[4787]: I0127 07:54:03.576206 4787 patch_prober.go:28] interesting pod/packageserver-d55dfcdfc-kqzdm container/packageserver namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.39:5443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body= Jan 27 07:54:03 crc kubenswrapper[4787]: I0127 07:54:03.576311 4787 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-kqzdm" podUID="082e51f2-7ac3-4111-a40d-eb8498db9153" containerName="packageserver" probeResult="failure" output="Get \"https://10.217.0.39:5443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Jan 27 07:54:03 crc kubenswrapper[4787]: I0127 07:54:03.622488 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 27 07:54:03 crc kubenswrapper[4787]: E0127 07:54:03.622833 4787 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-27 07:54:04.122812848 +0000 UTC m=+149.775168340 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 27 07:54:03 crc kubenswrapper[4787]: I0127 07:54:03.624838 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-mwnxd" event={"ID":"f72f30c1-aeab-47d1-b353-5ea33af8eb6b","Type":"ContainerStarted","Data":"8147fedbecc33c736afd3c6aa7d07546d68acce0f091c718e518023f8d45d6f7"} Jan 27 07:54:03 crc kubenswrapper[4787]: I0127 07:54:03.630881 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-zkx8b" event={"ID":"ba0745e4-7610-44e2-9a65-cd3875393d64","Type":"ContainerStarted","Data":"c9f35cb6d57784ba7f0529a72cd25a8350d6c643eadaffd067ef0a9e8ee75698"} Jan 27 07:54:03 crc kubenswrapper[4787]: I0127 07:54:03.650699 4787 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-vl4tj" podStartSLOduration=124.650662784 podStartE2EDuration="2m4.650662784s" podCreationTimestamp="2026-01-27 07:51:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-27 07:54:03.633398601 +0000 UTC m=+149.285754093" watchObservedRunningTime="2026-01-27 07:54:03.650662784 +0000 UTC m=+149.303018276" Jan 27 07:54:03 crc kubenswrapper[4787]: I0127 07:54:03.694766 4787 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-kqzdm" podStartSLOduration=124.694747524 podStartE2EDuration="2m4.694747524s" podCreationTimestamp="2026-01-27 07:51:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-27 07:54:03.693102883 +0000 UTC m=+149.345458375" watchObservedRunningTime="2026-01-27 07:54:03.694747524 +0000 UTC m=+149.347103016" Jan 27 07:54:03 crc kubenswrapper[4787]: I0127 07:54:03.727718 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-d44bj\" (UID: \"5bd6837c-65d0-40c3-9cbc-d05d4f1e93d0\") " pod="openshift-image-registry/image-registry-697d97f7c8-d44bj" Jan 27 07:54:03 crc kubenswrapper[4787]: I0127 07:54:03.740522 4787 patch_prober.go:28] interesting pod/router-default-5444994796-jzdds container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 27 07:54:03 crc kubenswrapper[4787]: [-]has-synced failed: reason withheld Jan 27 07:54:03 crc kubenswrapper[4787]: [+]process-running ok Jan 27 07:54:03 crc kubenswrapper[4787]: healthz check failed Jan 27 07:54:03 crc kubenswrapper[4787]: I0127 07:54:03.741591 4787 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-jzdds" podUID="5f4a2300-d512-490d-a876-6ad03f0c2f31" containerName="router" probeResult="failure" output="HTTP 
probe failed with statuscode: 500" Jan 27 07:54:03 crc kubenswrapper[4787]: E0127 07:54:03.746399 4787 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-27 07:54:04.246379566 +0000 UTC m=+149.898735058 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-d44bj" (UID: "5bd6837c-65d0-40c3-9cbc-d05d4f1e93d0") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 27 07:54:03 crc kubenswrapper[4787]: I0127 07:54:03.834910 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 27 07:54:03 crc kubenswrapper[4787]: E0127 07:54:03.835456 4787 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-27 07:54:04.33543384 +0000 UTC m=+149.987789332 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 27 07:54:03 crc kubenswrapper[4787]: I0127 07:54:03.937714 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-d44bj\" (UID: \"5bd6837c-65d0-40c3-9cbc-d05d4f1e93d0\") " pod="openshift-image-registry/image-registry-697d97f7c8-d44bj" Jan 27 07:54:03 crc kubenswrapper[4787]: E0127 07:54:03.938193 4787 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-27 07:54:04.438174383 +0000 UTC m=+150.090529875 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-d44bj" (UID: "5bd6837c-65d0-40c3-9cbc-d05d4f1e93d0") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 27 07:54:04 crc kubenswrapper[4787]: I0127 07:54:04.042355 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 27 07:54:04 crc kubenswrapper[4787]: E0127 07:54:04.043127 4787 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-27 07:54:04.543110698 +0000 UTC m=+150.195466190 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 27 07:54:04 crc kubenswrapper[4787]: I0127 07:54:04.144592 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-d44bj\" (UID: \"5bd6837c-65d0-40c3-9cbc-d05d4f1e93d0\") " pod="openshift-image-registry/image-registry-697d97f7c8-d44bj" Jan 27 07:54:04 crc kubenswrapper[4787]: E0127 07:54:04.145030 4787 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-27 07:54:04.64501185 +0000 UTC m=+150.297367342 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-d44bj" (UID: "5bd6837c-65d0-40c3-9cbc-d05d4f1e93d0") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 27 07:54:04 crc kubenswrapper[4787]: I0127 07:54:04.246425 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 27 07:54:04 crc kubenswrapper[4787]: E0127 07:54:04.246866 4787 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-27 07:54:04.74684627 +0000 UTC m=+150.399201762 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 27 07:54:04 crc kubenswrapper[4787]: I0127 07:54:04.300610 4787 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver/apiserver-76f77b778f-zkx8b" podStartSLOduration=126.300584119 podStartE2EDuration="2m6.300584119s" podCreationTimestamp="2026-01-27 07:51:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-27 07:54:03.920049958 +0000 UTC m=+149.572405480" watchObservedRunningTime="2026-01-27 07:54:04.300584119 +0000 UTC m=+149.952939611" Jan 27 07:54:04 crc kubenswrapper[4787]: I0127 07:54:04.349941 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-d44bj\" (UID: \"5bd6837c-65d0-40c3-9cbc-d05d4f1e93d0\") " pod="openshift-image-registry/image-registry-697d97f7c8-d44bj" Jan 27 07:54:04 crc kubenswrapper[4787]: E0127 07:54:04.350671 4787 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-27 07:54:04.850655292 +0000 UTC m=+150.503010784 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-d44bj" (UID: "5bd6837c-65d0-40c3-9cbc-d05d4f1e93d0") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 27 07:54:04 crc kubenswrapper[4787]: I0127 07:54:04.389861 4787 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-npc57"] Jan 27 07:54:04 crc kubenswrapper[4787]: I0127 07:54:04.392483 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-npc57" Jan 27 07:54:04 crc kubenswrapper[4787]: I0127 07:54:04.409601 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Jan 27 07:54:04 crc kubenswrapper[4787]: I0127 07:54:04.424831 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-npc57"] Jan 27 07:54:04 crc kubenswrapper[4787]: I0127 07:54:04.451625 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 27 07:54:04 crc kubenswrapper[4787]: E0127 07:54:04.452165 4787 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-27 07:54:04.952147159 +0000 UTC m=+150.604502651 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 27 07:54:04 crc kubenswrapper[4787]: I0127 07:54:04.557621 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-d44bj\" (UID: \"5bd6837c-65d0-40c3-9cbc-d05d4f1e93d0\") " pod="openshift-image-registry/image-registry-697d97f7c8-d44bj" Jan 27 07:54:04 crc kubenswrapper[4787]: I0127 07:54:04.557713 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7af45749-cd7f-490a-b2f6-bf979ea6467e-utilities\") pod \"certified-operators-npc57\" (UID: \"7af45749-cd7f-490a-b2f6-bf979ea6467e\") " pod="openshift-marketplace/certified-operators-npc57" Jan 27 07:54:04 crc kubenswrapper[4787]: I0127 07:54:04.557750 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wz5bs\" (UniqueName: \"kubernetes.io/projected/7af45749-cd7f-490a-b2f6-bf979ea6467e-kube-api-access-wz5bs\") pod \"certified-operators-npc57\" (UID: \"7af45749-cd7f-490a-b2f6-bf979ea6467e\") " pod="openshift-marketplace/certified-operators-npc57" Jan 27 07:54:04 crc kubenswrapper[4787]: I0127 07:54:04.557776 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7af45749-cd7f-490a-b2f6-bf979ea6467e-catalog-content\") pod \"certified-operators-npc57\" (UID: \"7af45749-cd7f-490a-b2f6-bf979ea6467e\") " pod="openshift-marketplace/certified-operators-npc57" Jan 27 07:54:04 crc kubenswrapper[4787]: E0127 07:54:04.558166 4787 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-27 07:54:05.058151494 +0000 UTC m=+150.710506986 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-d44bj" (UID: "5bd6837c-65d0-40c3-9cbc-d05d4f1e93d0") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 27 07:54:04 crc kubenswrapper[4787]: I0127 07:54:04.578460 4787 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-tnxzl"] Jan 27 07:54:04 crc kubenswrapper[4787]: I0127 07:54:04.580099 4787 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-tnxzl" Jan 27 07:54:04 crc kubenswrapper[4787]: I0127 07:54:04.586011 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Jan 27 07:54:04 crc kubenswrapper[4787]: I0127 07:54:04.605280 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-tnxzl"] Jan 27 07:54:04 crc kubenswrapper[4787]: I0127 07:54:04.631084 4787 patch_prober.go:28] interesting pod/packageserver-d55dfcdfc-kqzdm container/packageserver namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.39:5443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body= Jan 27 07:54:04 crc kubenswrapper[4787]: I0127 07:54:04.631184 4787 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-kqzdm" podUID="082e51f2-7ac3-4111-a40d-eb8498db9153" containerName="packageserver" probeResult="failure" output="Get \"https://10.217.0.39:5443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Jan 27 07:54:04 crc kubenswrapper[4787]: I0127 07:54:04.661456 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 27 07:54:04 crc kubenswrapper[4787]: E0127 07:54:04.661568 4787 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-27 07:54:05.161536641 +0000 UTC m=+150.813892133 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 27 07:54:04 crc kubenswrapper[4787]: I0127 07:54:04.661813 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/44cc5af2-0636-4589-a988-e7e32bfea075-utilities\") pod \"community-operators-tnxzl\" (UID: \"44cc5af2-0636-4589-a988-e7e32bfea075\") " pod="openshift-marketplace/community-operators-tnxzl" Jan 27 07:54:04 crc kubenswrapper[4787]: I0127 07:54:04.661869 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7af45749-cd7f-490a-b2f6-bf979ea6467e-utilities\") pod \"certified-operators-npc57\" (UID: \"7af45749-cd7f-490a-b2f6-bf979ea6467e\") " pod="openshift-marketplace/certified-operators-npc57" Jan 27 07:54:04 crc kubenswrapper[4787]: I0127 07:54:04.661896 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wz5bs\" (UniqueName: \"kubernetes.io/projected/7af45749-cd7f-490a-b2f6-bf979ea6467e-kube-api-access-wz5bs\") pod \"certified-operators-npc57\" (UID: \"7af45749-cd7f-490a-b2f6-bf979ea6467e\") " pod="openshift-marketplace/certified-operators-npc57" Jan 27 07:54:04 crc kubenswrapper[4787]: I0127 07:54:04.661915 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7af45749-cd7f-490a-b2f6-bf979ea6467e-catalog-content\") pod \"certified-operators-npc57\" (UID: \"7af45749-cd7f-490a-b2f6-bf979ea6467e\") " pod="openshift-marketplace/certified-operators-npc57" Jan 27 07:54:04 crc kubenswrapper[4787]: I0127 07:54:04.661945 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5tmpp\" (UniqueName: \"kubernetes.io/projected/44cc5af2-0636-4589-a988-e7e32bfea075-kube-api-access-5tmpp\") pod \"community-operators-tnxzl\" (UID: \"44cc5af2-0636-4589-a988-e7e32bfea075\") " pod="openshift-marketplace/community-operators-tnxzl" Jan 27 07:54:04 crc kubenswrapper[4787]: I0127 07:54:04.661995 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-d44bj\" (UID: \"5bd6837c-65d0-40c3-9cbc-d05d4f1e93d0\") " pod="openshift-image-registry/image-registry-697d97f7c8-d44bj" Jan 27 07:54:04 crc kubenswrapper[4787]: I0127 07:54:04.662018 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/44cc5af2-0636-4589-a988-e7e32bfea075-catalog-content\") pod \"community-operators-tnxzl\" (UID: \"44cc5af2-0636-4589-a988-e7e32bfea075\") " pod="openshift-marketplace/community-operators-tnxzl" Jan 27 07:54:04 crc kubenswrapper[4787]: I0127 07:54:04.662445 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: 
\"kubernetes.io/empty-dir/7af45749-cd7f-490a-b2f6-bf979ea6467e-utilities\") pod \"certified-operators-npc57\" (UID: \"7af45749-cd7f-490a-b2f6-bf979ea6467e\") " pod="openshift-marketplace/certified-operators-npc57" Jan 27 07:54:04 crc kubenswrapper[4787]: I0127 07:54:04.662825 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7af45749-cd7f-490a-b2f6-bf979ea6467e-catalog-content\") pod \"certified-operators-npc57\" (UID: \"7af45749-cd7f-490a-b2f6-bf979ea6467e\") " pod="openshift-marketplace/certified-operators-npc57" Jan 27 07:54:04 crc kubenswrapper[4787]: E0127 07:54:04.663057 4787 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-27 07:54:05.163033587 +0000 UTC m=+150.815389079 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-d44bj" (UID: "5bd6837c-65d0-40c3-9cbc-d05d4f1e93d0") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 27 07:54:04 crc kubenswrapper[4787]: I0127 07:54:04.674530 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"55076bce06be9fa0dc9f6c9adfcefb7226f9ea09b1332b9b63626a68a828e31a"} Jan 27 07:54:04 crc kubenswrapper[4787]: I0127 07:54:04.678650 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"3cd49041d9face90fd6917b64fbf9e045896d6a0cf124e89038197ea750b3b90"} Jan 27 07:54:04 crc kubenswrapper[4787]: I0127 07:54:04.679591 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"525470d42bb9b8a1c30570a7364c14ddadd46a66f4651607342d646e8d070258"} Jan 27 07:54:04 crc kubenswrapper[4787]: I0127 07:54:04.696877 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-mwnxd" event={"ID":"f72f30c1-aeab-47d1-b353-5ea33af8eb6b","Type":"ContainerStarted","Data":"5142e1fc4ef05bc43569f915f30f7c73ec539c6e0ce5f921c8b907db94fca10d"} Jan 27 07:54:04 crc kubenswrapper[4787]: I0127 07:54:04.708708 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wz5bs\" (UniqueName: \"kubernetes.io/projected/7af45749-cd7f-490a-b2f6-bf979ea6467e-kube-api-access-wz5bs\") pod \"certified-operators-npc57\" (UID: \"7af45749-cd7f-490a-b2f6-bf979ea6467e\") " pod="openshift-marketplace/certified-operators-npc57" Jan 27 07:54:04 crc kubenswrapper[4787]: I0127 07:54:04.731869 4787 patch_prober.go:28] interesting pod/router-default-5444994796-jzdds container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 27 07:54:04 crc kubenswrapper[4787]: [-]has-synced failed: reason 
withheld Jan 27 07:54:04 crc kubenswrapper[4787]: [+]process-running ok Jan 27 07:54:04 crc kubenswrapper[4787]: healthz check failed Jan 27 07:54:04 crc kubenswrapper[4787]: I0127 07:54:04.731929 4787 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-jzdds" podUID="5f4a2300-d512-490d-a876-6ad03f0c2f31" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 27 07:54:04 crc kubenswrapper[4787]: I0127 07:54:04.735262 4787 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-672cg"] Jan 27 07:54:04 crc kubenswrapper[4787]: I0127 07:54:04.736475 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-672cg" Jan 27 07:54:04 crc kubenswrapper[4787]: I0127 07:54:04.763366 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 27 07:54:04 crc kubenswrapper[4787]: I0127 07:54:04.763657 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/44cc5af2-0636-4589-a988-e7e32bfea075-catalog-content\") pod \"community-operators-tnxzl\" (UID: \"44cc5af2-0636-4589-a988-e7e32bfea075\") " pod="openshift-marketplace/community-operators-tnxzl" Jan 27 07:54:04 crc kubenswrapper[4787]: I0127 07:54:04.763718 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/44cc5af2-0636-4589-a988-e7e32bfea075-utilities\") pod \"community-operators-tnxzl\" (UID: \"44cc5af2-0636-4589-a988-e7e32bfea075\") " pod="openshift-marketplace/community-operators-tnxzl" Jan 27 07:54:04 crc kubenswrapper[4787]: I0127 07:54:04.763918 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5tmpp\" (UniqueName: \"kubernetes.io/projected/44cc5af2-0636-4589-a988-e7e32bfea075-kube-api-access-5tmpp\") pod \"community-operators-tnxzl\" (UID: \"44cc5af2-0636-4589-a988-e7e32bfea075\") " pod="openshift-marketplace/community-operators-tnxzl" Jan 27 07:54:04 crc kubenswrapper[4787]: E0127 07:54:04.765010 4787 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-27 07:54:05.2649588 +0000 UTC m=+150.917314462 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 27 07:54:04 crc kubenswrapper[4787]: I0127 07:54:04.766088 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/44cc5af2-0636-4589-a988-e7e32bfea075-catalog-content\") pod \"community-operators-tnxzl\" (UID: \"44cc5af2-0636-4589-a988-e7e32bfea075\") " pod="openshift-marketplace/community-operators-tnxzl" Jan 27 07:54:04 crc kubenswrapper[4787]: I0127 07:54:04.766305 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-npc57" Jan 27 07:54:04 crc kubenswrapper[4787]: I0127 07:54:04.766767 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/44cc5af2-0636-4589-a988-e7e32bfea075-utilities\") pod \"community-operators-tnxzl\" (UID: \"44cc5af2-0636-4589-a988-e7e32bfea075\") " pod="openshift-marketplace/community-operators-tnxzl" Jan 27 07:54:04 crc kubenswrapper[4787]: I0127 07:54:04.772610 4787 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-kqzdm" Jan 27 07:54:04 crc kubenswrapper[4787]: I0127 07:54:04.823569 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5tmpp\" (UniqueName: \"kubernetes.io/projected/44cc5af2-0636-4589-a988-e7e32bfea075-kube-api-access-5tmpp\") pod \"community-operators-tnxzl\" (UID: \"44cc5af2-0636-4589-a988-e7e32bfea075\") " pod="openshift-marketplace/community-operators-tnxzl" Jan 27 07:54:04 crc kubenswrapper[4787]: I0127 07:54:04.832084 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-672cg"] Jan 27 07:54:04 crc kubenswrapper[4787]: I0127 07:54:04.865216 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bqrrx\" (UniqueName: \"kubernetes.io/projected/2a1ecf2a-6b91-48d7-873e-918dd4e045fc-kube-api-access-bqrrx\") pod \"certified-operators-672cg\" (UID: \"2a1ecf2a-6b91-48d7-873e-918dd4e045fc\") " pod="openshift-marketplace/certified-operators-672cg" Jan 27 07:54:04 crc kubenswrapper[4787]: I0127 07:54:04.865303 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2a1ecf2a-6b91-48d7-873e-918dd4e045fc-utilities\") pod \"certified-operators-672cg\" (UID: \"2a1ecf2a-6b91-48d7-873e-918dd4e045fc\") " pod="openshift-marketplace/certified-operators-672cg" Jan 27 07:54:04 crc kubenswrapper[4787]: I0127 07:54:04.865326 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2a1ecf2a-6b91-48d7-873e-918dd4e045fc-catalog-content\") pod \"certified-operators-672cg\" (UID: \"2a1ecf2a-6b91-48d7-873e-918dd4e045fc\") " pod="openshift-marketplace/certified-operators-672cg" Jan 27 07:54:04 crc kubenswrapper[4787]: I0127 07:54:04.865362 4787 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-d44bj\" (UID: \"5bd6837c-65d0-40c3-9cbc-d05d4f1e93d0\") " pod="openshift-image-registry/image-registry-697d97f7c8-d44bj" Jan 27 07:54:04 crc kubenswrapper[4787]: E0127 07:54:04.865785 4787 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-27 07:54:05.365766191 +0000 UTC m=+151.018121693 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-d44bj" (UID: "5bd6837c-65d0-40c3-9cbc-d05d4f1e93d0") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 27 07:54:04 crc kubenswrapper[4787]: I0127 07:54:04.918382 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-tnxzl" Jan 27 07:54:04 crc kubenswrapper[4787]: I0127 07:54:04.928202 4787 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-cgwgp"] Jan 27 07:54:04 crc kubenswrapper[4787]: I0127 07:54:04.929513 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-cgwgp" Jan 27 07:54:04 crc kubenswrapper[4787]: I0127 07:54:04.967107 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 27 07:54:04 crc kubenswrapper[4787]: I0127 07:54:04.967460 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bqrrx\" (UniqueName: \"kubernetes.io/projected/2a1ecf2a-6b91-48d7-873e-918dd4e045fc-kube-api-access-bqrrx\") pod \"certified-operators-672cg\" (UID: \"2a1ecf2a-6b91-48d7-873e-918dd4e045fc\") " pod="openshift-marketplace/certified-operators-672cg" Jan 27 07:54:04 crc kubenswrapper[4787]: E0127 07:54:04.967603 4787 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-27 07:54:05.467569759 +0000 UTC m=+151.119925251 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 27 07:54:04 crc kubenswrapper[4787]: I0127 07:54:04.967717 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2a1ecf2a-6b91-48d7-873e-918dd4e045fc-utilities\") pod \"certified-operators-672cg\" (UID: \"2a1ecf2a-6b91-48d7-873e-918dd4e045fc\") " pod="openshift-marketplace/certified-operators-672cg" Jan 27 07:54:04 crc kubenswrapper[4787]: I0127 07:54:04.967764 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2a1ecf2a-6b91-48d7-873e-918dd4e045fc-catalog-content\") pod \"certified-operators-672cg\" (UID: \"2a1ecf2a-6b91-48d7-873e-918dd4e045fc\") " pod="openshift-marketplace/certified-operators-672cg" Jan 27 07:54:04 crc kubenswrapper[4787]: I0127 07:54:04.967820 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-d44bj\" (UID: \"5bd6837c-65d0-40c3-9cbc-d05d4f1e93d0\") " pod="openshift-image-registry/image-registry-697d97f7c8-d44bj" Jan 27 07:54:04 crc kubenswrapper[4787]: E0127 07:54:04.968420 4787 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-27 07:54:05.46840974 +0000 UTC m=+151.120765232 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-d44bj" (UID: "5bd6837c-65d0-40c3-9cbc-d05d4f1e93d0") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 27 07:54:04 crc kubenswrapper[4787]: I0127 07:54:04.968428 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2a1ecf2a-6b91-48d7-873e-918dd4e045fc-utilities\") pod \"certified-operators-672cg\" (UID: \"2a1ecf2a-6b91-48d7-873e-918dd4e045fc\") " pod="openshift-marketplace/certified-operators-672cg" Jan 27 07:54:04 crc kubenswrapper[4787]: I0127 07:54:04.968941 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2a1ecf2a-6b91-48d7-873e-918dd4e045fc-catalog-content\") pod \"certified-operators-672cg\" (UID: \"2a1ecf2a-6b91-48d7-873e-918dd4e045fc\") " pod="openshift-marketplace/certified-operators-672cg" Jan 27 07:54:04 crc kubenswrapper[4787]: I0127 07:54:04.977670 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-cgwgp"] Jan 27 07:54:05 crc kubenswrapper[4787]: I0127 07:54:05.042318 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bqrrx\" (UniqueName: \"kubernetes.io/projected/2a1ecf2a-6b91-48d7-873e-918dd4e045fc-kube-api-access-bqrrx\") pod \"certified-operators-672cg\" (UID: \"2a1ecf2a-6b91-48d7-873e-918dd4e045fc\") " pod="openshift-marketplace/certified-operators-672cg" Jan 27 07:54:05 crc kubenswrapper[4787]: I0127 07:54:05.070033 4787 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-672cg" Jan 27 07:54:05 crc kubenswrapper[4787]: I0127 07:54:05.071045 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 27 07:54:05 crc kubenswrapper[4787]: I0127 07:54:05.071398 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z5r7l\" (UniqueName: \"kubernetes.io/projected/0368ea79-94a8-42e3-8986-dddbec83d755-kube-api-access-z5r7l\") pod \"community-operators-cgwgp\" (UID: \"0368ea79-94a8-42e3-8986-dddbec83d755\") " pod="openshift-marketplace/community-operators-cgwgp" Jan 27 07:54:05 crc kubenswrapper[4787]: I0127 07:54:05.071425 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0368ea79-94a8-42e3-8986-dddbec83d755-catalog-content\") pod \"community-operators-cgwgp\" (UID: \"0368ea79-94a8-42e3-8986-dddbec83d755\") " pod="openshift-marketplace/community-operators-cgwgp" Jan 27 07:54:05 crc kubenswrapper[4787]: I0127 07:54:05.071490 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0368ea79-94a8-42e3-8986-dddbec83d755-utilities\") pod \"community-operators-cgwgp\" (UID: \"0368ea79-94a8-42e3-8986-dddbec83d755\") " pod="openshift-marketplace/community-operators-cgwgp" Jan 27 07:54:05 crc kubenswrapper[4787]: E0127 07:54:05.071621 4787 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-27 07:54:05.57160102 +0000 UTC m=+151.223956512 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 27 07:54:05 crc kubenswrapper[4787]: I0127 07:54:05.176097 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0368ea79-94a8-42e3-8986-dddbec83d755-utilities\") pod \"community-operators-cgwgp\" (UID: \"0368ea79-94a8-42e3-8986-dddbec83d755\") " pod="openshift-marketplace/community-operators-cgwgp" Jan 27 07:54:05 crc kubenswrapper[4787]: I0127 07:54:05.176168 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z5r7l\" (UniqueName: \"kubernetes.io/projected/0368ea79-94a8-42e3-8986-dddbec83d755-kube-api-access-z5r7l\") pod \"community-operators-cgwgp\" (UID: \"0368ea79-94a8-42e3-8986-dddbec83d755\") " pod="openshift-marketplace/community-operators-cgwgp" Jan 27 07:54:05 crc kubenswrapper[4787]: I0127 07:54:05.176198 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0368ea79-94a8-42e3-8986-dddbec83d755-catalog-content\") pod \"community-operators-cgwgp\" (UID: \"0368ea79-94a8-42e3-8986-dddbec83d755\") " pod="openshift-marketplace/community-operators-cgwgp" Jan 27 07:54:05 crc kubenswrapper[4787]: I0127 07:54:05.176238 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-d44bj\" (UID: \"5bd6837c-65d0-40c3-9cbc-d05d4f1e93d0\") " pod="openshift-image-registry/image-registry-697d97f7c8-d44bj" Jan 27 07:54:05 crc kubenswrapper[4787]: E0127 07:54:05.176581 4787 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-27 07:54:05.676546136 +0000 UTC m=+151.328901628 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-d44bj" (UID: "5bd6837c-65d0-40c3-9cbc-d05d4f1e93d0") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 27 07:54:05 crc kubenswrapper[4787]: I0127 07:54:05.177495 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0368ea79-94a8-42e3-8986-dddbec83d755-utilities\") pod \"community-operators-cgwgp\" (UID: \"0368ea79-94a8-42e3-8986-dddbec83d755\") " pod="openshift-marketplace/community-operators-cgwgp" Jan 27 07:54:05 crc kubenswrapper[4787]: I0127 07:54:05.178271 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0368ea79-94a8-42e3-8986-dddbec83d755-catalog-content\") pod \"community-operators-cgwgp\" (UID: \"0368ea79-94a8-42e3-8986-dddbec83d755\") " pod="openshift-marketplace/community-operators-cgwgp" Jan 27 07:54:05 crc kubenswrapper[4787]: I0127 07:54:05.260326 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z5r7l\" (UniqueName: \"kubernetes.io/projected/0368ea79-94a8-42e3-8986-dddbec83d755-kube-api-access-z5r7l\") pod \"community-operators-cgwgp\" (UID: \"0368ea79-94a8-42e3-8986-dddbec83d755\") " pod="openshift-marketplace/community-operators-cgwgp" Jan 27 07:54:05 crc kubenswrapper[4787]: I0127 07:54:05.279491 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 27 07:54:05 crc kubenswrapper[4787]: E0127 07:54:05.280104 4787 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-27 07:54:05.780082987 +0000 UTC m=+151.432438479 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 27 07:54:05 crc kubenswrapper[4787]: I0127 07:54:05.379242 4787 plugin_watcher.go:194] "Adding socket path or updating timestamp to desired state cache" path="/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock" Jan 27 07:54:05 crc kubenswrapper[4787]: I0127 07:54:05.382573 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-d44bj\" (UID: \"5bd6837c-65d0-40c3-9cbc-d05d4f1e93d0\") " pod="openshift-image-registry/image-registry-697d97f7c8-d44bj" Jan 27 07:54:05 crc kubenswrapper[4787]: E0127 07:54:05.382988 4787 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-27 07:54:05.882974567 +0000 UTC m=+151.535330059 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-d44bj" (UID: "5bd6837c-65d0-40c3-9cbc-d05d4f1e93d0") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 27 07:54:05 crc kubenswrapper[4787]: I0127 07:54:05.483412 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 27 07:54:05 crc kubenswrapper[4787]: E0127 07:54:05.483906 4787 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-27 07:54:05.983874091 +0000 UTC m=+151.636229583 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 27 07:54:05 crc kubenswrapper[4787]: I0127 07:54:05.528243 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-npc57"] Jan 27 07:54:05 crc kubenswrapper[4787]: I0127 07:54:05.548264 4787 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-cgwgp" Jan 27 07:54:05 crc kubenswrapper[4787]: I0127 07:54:05.588603 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-d44bj\" (UID: \"5bd6837c-65d0-40c3-9cbc-d05d4f1e93d0\") " pod="openshift-image-registry/image-registry-697d97f7c8-d44bj" Jan 27 07:54:05 crc kubenswrapper[4787]: E0127 07:54:05.589260 4787 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-27 07:54:06.089244552 +0000 UTC m=+151.741600034 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-d44bj" (UID: "5bd6837c-65d0-40c3-9cbc-d05d4f1e93d0") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 27 07:54:05 crc kubenswrapper[4787]: I0127 07:54:05.696955 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 27 07:54:05 crc kubenswrapper[4787]: E0127 07:54:05.697455 4787 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-27 07:54:06.197429859 +0000 UTC m=+151.849785351 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 27 07:54:05 crc kubenswrapper[4787]: I0127 07:54:05.699222 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-672cg"] Jan 27 07:54:05 crc kubenswrapper[4787]: W0127 07:54:05.715879 4787 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2a1ecf2a_6b91_48d7_873e_918dd4e045fc.slice/crio-b8aaeac82942157bbd1bdc8b6aa3df78bb8abe8a0a411a20aacb7836a96b1339 WatchSource:0}: Error finding container b8aaeac82942157bbd1bdc8b6aa3df78bb8abe8a0a411a20aacb7836a96b1339: Status 404 returned error can't find the container with id b8aaeac82942157bbd1bdc8b6aa3df78bb8abe8a0a411a20aacb7836a96b1339 Jan 27 07:54:05 crc kubenswrapper[4787]: I0127 07:54:05.725902 4787 patch_prober.go:28] interesting pod/router-default-5444994796-jzdds container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 27 07:54:05 crc kubenswrapper[4787]: [-]has-synced failed: reason withheld Jan 27 07:54:05 crc kubenswrapper[4787]: [+]process-running ok Jan 27 07:54:05 crc kubenswrapper[4787]: healthz check failed Jan 27 07:54:05 crc kubenswrapper[4787]: I0127 07:54:05.725946 4787 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-jzdds" podUID="5f4a2300-d512-490d-a876-6ad03f0c2f31" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 27 07:54:05 crc kubenswrapper[4787]: I0127 07:54:05.739891 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-mwnxd" event={"ID":"f72f30c1-aeab-47d1-b353-5ea33af8eb6b","Type":"ContainerStarted","Data":"b1ca9995e2add56c2a17127ab2ad642f5a17be1d9e3c4f15806046289c91bc8a"} Jan 27 07:54:05 crc kubenswrapper[4787]: I0127 07:54:05.758708 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"7b945307886250632e5bd4040693345426f7bc80be3677f88f8e89e9dc77dc7a"} Jan 27 07:54:05 crc kubenswrapper[4787]: I0127 07:54:05.773424 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"bd1a40d31617672448a6f5392eac59ecf9416a45d350f7dacd468d69e900a076"} Jan 27 07:54:05 crc kubenswrapper[4787]: I0127 07:54:05.774308 4787 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 27 07:54:05 crc kubenswrapper[4787]: I0127 07:54:05.776482 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-tnxzl"] Jan 27 07:54:05 crc kubenswrapper[4787]: I0127 07:54:05.788898 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" 
event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"83d226bf4cbb8abdbbfb7a8b939bbf8d690037bd9cfe8bce754784e1de4650ec"} Jan 27 07:54:05 crc kubenswrapper[4787]: I0127 07:54:05.798638 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-d44bj\" (UID: \"5bd6837c-65d0-40c3-9cbc-d05d4f1e93d0\") " pod="openshift-image-registry/image-registry-697d97f7c8-d44bj" Jan 27 07:54:05 crc kubenswrapper[4787]: E0127 07:54:05.799993 4787 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-27 07:54:06.299967264 +0000 UTC m=+151.952322756 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-d44bj" (UID: "5bd6837c-65d0-40c3-9cbc-d05d4f1e93d0") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 27 07:54:05 crc kubenswrapper[4787]: I0127 07:54:05.801722 4787 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="hostpath-provisioner/csi-hostpathplugin-mwnxd" podStartSLOduration=11.801701879 podStartE2EDuration="11.801701879s" podCreationTimestamp="2026-01-27 07:53:54 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-27 07:54:05.800343348 +0000 UTC m=+151.452698860" watchObservedRunningTime="2026-01-27 07:54:05.801701879 +0000 UTC m=+151.454057371" Jan 27 07:54:05 crc kubenswrapper[4787]: I0127 07:54:05.815092 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-npc57" event={"ID":"7af45749-cd7f-490a-b2f6-bf979ea6467e","Type":"ContainerStarted","Data":"d4eb89ad2e8044f8b7ec7f7b1e0a8d0bd6b479506668e725c471bfff9235d2ce"} Jan 27 07:54:05 crc kubenswrapper[4787]: I0127 07:54:05.901740 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 27 07:54:05 crc kubenswrapper[4787]: E0127 07:54:05.904419 4787 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-27 07:54:06.404384959 +0000 UTC m=+152.056740591 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 27 07:54:06 crc kubenswrapper[4787]: I0127 07:54:06.003374 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-d44bj\" (UID: \"5bd6837c-65d0-40c3-9cbc-d05d4f1e93d0\") " pod="openshift-image-registry/image-registry-697d97f7c8-d44bj" Jan 27 07:54:06 crc kubenswrapper[4787]: E0127 07:54:06.004311 4787 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-27 07:54:06.504293358 +0000 UTC m=+152.156648850 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-d44bj" (UID: "5bd6837c-65d0-40c3-9cbc-d05d4f1e93d0") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 27 07:54:06 crc kubenswrapper[4787]: I0127 07:54:06.116615 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 27 07:54:06 crc kubenswrapper[4787]: E0127 07:54:06.117706 4787 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-27 07:54:06.617645166 +0000 UTC m=+152.270000658 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 27 07:54:06 crc kubenswrapper[4787]: I0127 07:54:06.169031 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-cgwgp"] Jan 27 07:54:06 crc kubenswrapper[4787]: I0127 07:54:06.221638 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-d44bj\" (UID: \"5bd6837c-65d0-40c3-9cbc-d05d4f1e93d0\") " pod="openshift-image-registry/image-registry-697d97f7c8-d44bj" Jan 27 07:54:06 crc kubenswrapper[4787]: E0127 07:54:06.222164 4787 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-27 07:54:06.722148874 +0000 UTC m=+152.374504366 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-d44bj" (UID: "5bd6837c-65d0-40c3-9cbc-d05d4f1e93d0") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 27 07:54:06 crc kubenswrapper[4787]: E0127 07:54:06.225272 4787 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2a1ecf2a_6b91_48d7_873e_918dd4e045fc.slice/crio-conmon-01d251eebd7c2252e3ae9b7ab01cffa2fc304155551715934787979ebd43ab3e.scope\": RecentStats: unable to find data in memory cache]" Jan 27 07:54:06 crc kubenswrapper[4787]: I0127 07:54:06.262684 4787 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Jan 27 07:54:06 crc kubenswrapper[4787]: I0127 07:54:06.263504 4787 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Jan 27 07:54:06 crc kubenswrapper[4787]: I0127 07:54:06.278946 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Jan 27 07:54:06 crc kubenswrapper[4787]: I0127 07:54:06.279061 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager"/"installer-sa-dockercfg-kjl2n" Jan 27 07:54:06 crc kubenswrapper[4787]: I0127 07:54:06.279139 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager"/"kube-root-ca.crt" Jan 27 07:54:06 crc kubenswrapper[4787]: I0127 07:54:06.301761 4787 reconciler.go:161] "OperationExecutor.RegisterPlugin started" plugin={"SocketPath":"/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock","Timestamp":"2026-01-27T07:54:05.379276149Z","Handler":null,"Name":""} Jan 27 07:54:06 crc kubenswrapper[4787]: I0127 07:54:06.311570 4787 csi_plugin.go:100] kubernetes.io/csi: Trying to validate a new CSI Driver with name: kubevirt.io.hostpath-provisioner endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock versions: 1.0.0 Jan 27 07:54:06 crc kubenswrapper[4787]: I0127 07:54:06.311625 4787 csi_plugin.go:113] kubernetes.io/csi: Register new plugin with name: kubevirt.io.hostpath-provisioner at endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock Jan 27 07:54:06 crc kubenswrapper[4787]: I0127 07:54:06.327334 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 27 07:54:06 crc kubenswrapper[4787]: I0127 07:54:06.327572 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/38edf9c9-43b3-4895-ac38-d33e35dd84ff-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"38edf9c9-43b3-4895-ac38-d33e35dd84ff\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Jan 27 07:54:06 crc kubenswrapper[4787]: I0127 07:54:06.327634 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/38edf9c9-43b3-4895-ac38-d33e35dd84ff-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"38edf9c9-43b3-4895-ac38-d33e35dd84ff\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Jan 27 07:54:06 crc kubenswrapper[4787]: I0127 07:54:06.336758 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". 
PluginName "kubernetes.io/csi", VolumeGidValue "" Jan 27 07:54:06 crc kubenswrapper[4787]: I0127 07:54:06.429302 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/38edf9c9-43b3-4895-ac38-d33e35dd84ff-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"38edf9c9-43b3-4895-ac38-d33e35dd84ff\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Jan 27 07:54:06 crc kubenswrapper[4787]: I0127 07:54:06.429890 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-d44bj\" (UID: \"5bd6837c-65d0-40c3-9cbc-d05d4f1e93d0\") " pod="openshift-image-registry/image-registry-697d97f7c8-d44bj" Jan 27 07:54:06 crc kubenswrapper[4787]: I0127 07:54:06.429935 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/38edf9c9-43b3-4895-ac38-d33e35dd84ff-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"38edf9c9-43b3-4895-ac38-d33e35dd84ff\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Jan 27 07:54:06 crc kubenswrapper[4787]: I0127 07:54:06.429714 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/38edf9c9-43b3-4895-ac38-d33e35dd84ff-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"38edf9c9-43b3-4895-ac38-d33e35dd84ff\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Jan 27 07:54:06 crc kubenswrapper[4787]: I0127 07:54:06.434515 4787 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Jan 27 07:54:06 crc kubenswrapper[4787]: I0127 07:54:06.434593 4787 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-d44bj\" (UID: \"5bd6837c-65d0-40c3-9cbc-d05d4f1e93d0\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount\"" pod="openshift-image-registry/image-registry-697d97f7c8-d44bj" Jan 27 07:54:06 crc kubenswrapper[4787]: I0127 07:54:06.476468 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/38edf9c9-43b3-4895-ac38-d33e35dd84ff-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"38edf9c9-43b3-4895-ac38-d33e35dd84ff\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Jan 27 07:54:06 crc kubenswrapper[4787]: I0127 07:54:06.523768 4787 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-ht6h9"] Jan 27 07:54:06 crc kubenswrapper[4787]: I0127 07:54:06.524965 4787 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-ht6h9" Jan 27 07:54:06 crc kubenswrapper[4787]: I0127 07:54:06.527758 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Jan 27 07:54:06 crc kubenswrapper[4787]: I0127 07:54:06.550511 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-ht6h9"] Jan 27 07:54:06 crc kubenswrapper[4787]: I0127 07:54:06.590505 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-d44bj\" (UID: \"5bd6837c-65d0-40c3-9cbc-d05d4f1e93d0\") " pod="openshift-image-registry/image-registry-697d97f7c8-d44bj" Jan 27 07:54:06 crc kubenswrapper[4787]: I0127 07:54:06.632819 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/624e9a13-c9c5-4ef3-8628-056bfc65338b-utilities\") pod \"redhat-marketplace-ht6h9\" (UID: \"624e9a13-c9c5-4ef3-8628-056bfc65338b\") " pod="openshift-marketplace/redhat-marketplace-ht6h9" Jan 27 07:54:06 crc kubenswrapper[4787]: I0127 07:54:06.632897 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nkvcd\" (UniqueName: \"kubernetes.io/projected/624e9a13-c9c5-4ef3-8628-056bfc65338b-kube-api-access-nkvcd\") pod \"redhat-marketplace-ht6h9\" (UID: \"624e9a13-c9c5-4ef3-8628-056bfc65338b\") " pod="openshift-marketplace/redhat-marketplace-ht6h9" Jan 27 07:54:06 crc kubenswrapper[4787]: I0127 07:54:06.633285 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/624e9a13-c9c5-4ef3-8628-056bfc65338b-catalog-content\") pod \"redhat-marketplace-ht6h9\" (UID: \"624e9a13-c9c5-4ef3-8628-056bfc65338b\") " pod="openshift-marketplace/redhat-marketplace-ht6h9" Jan 27 07:54:06 crc kubenswrapper[4787]: I0127 07:54:06.665717 4787 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Jan 27 07:54:06 crc kubenswrapper[4787]: I0127 07:54:06.714178 4787 patch_prober.go:28] interesting pod/router-default-5444994796-jzdds container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 27 07:54:06 crc kubenswrapper[4787]: [-]has-synced failed: reason withheld Jan 27 07:54:06 crc kubenswrapper[4787]: [+]process-running ok Jan 27 07:54:06 crc kubenswrapper[4787]: healthz check failed Jan 27 07:54:06 crc kubenswrapper[4787]: I0127 07:54:06.714287 4787 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-jzdds" podUID="5f4a2300-d512-490d-a876-6ad03f0c2f31" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 27 07:54:06 crc kubenswrapper[4787]: I0127 07:54:06.735179 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/624e9a13-c9c5-4ef3-8628-056bfc65338b-catalog-content\") pod \"redhat-marketplace-ht6h9\" (UID: \"624e9a13-c9c5-4ef3-8628-056bfc65338b\") " pod="openshift-marketplace/redhat-marketplace-ht6h9" Jan 27 07:54:06 crc kubenswrapper[4787]: I0127 07:54:06.735287 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/624e9a13-c9c5-4ef3-8628-056bfc65338b-utilities\") pod \"redhat-marketplace-ht6h9\" (UID: \"624e9a13-c9c5-4ef3-8628-056bfc65338b\") " pod="openshift-marketplace/redhat-marketplace-ht6h9" Jan 27 07:54:06 crc kubenswrapper[4787]: I0127 07:54:06.735329 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nkvcd\" (UniqueName: \"kubernetes.io/projected/624e9a13-c9c5-4ef3-8628-056bfc65338b-kube-api-access-nkvcd\") pod \"redhat-marketplace-ht6h9\" (UID: \"624e9a13-c9c5-4ef3-8628-056bfc65338b\") " pod="openshift-marketplace/redhat-marketplace-ht6h9" Jan 27 07:54:06 crc kubenswrapper[4787]: I0127 07:54:06.736839 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/624e9a13-c9c5-4ef3-8628-056bfc65338b-catalog-content\") pod \"redhat-marketplace-ht6h9\" (UID: \"624e9a13-c9c5-4ef3-8628-056bfc65338b\") " pod="openshift-marketplace/redhat-marketplace-ht6h9" Jan 27 07:54:06 crc kubenswrapper[4787]: I0127 07:54:06.737086 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/624e9a13-c9c5-4ef3-8628-056bfc65338b-utilities\") pod \"redhat-marketplace-ht6h9\" (UID: \"624e9a13-c9c5-4ef3-8628-056bfc65338b\") " pod="openshift-marketplace/redhat-marketplace-ht6h9" Jan 27 07:54:06 crc kubenswrapper[4787]: I0127 07:54:06.742165 4787 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-f9d7485db-qptnb" Jan 27 07:54:06 crc kubenswrapper[4787]: I0127 07:54:06.742264 4787 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-f9d7485db-qptnb" Jan 27 07:54:06 crc kubenswrapper[4787]: I0127 07:54:06.749419 4787 patch_prober.go:28] interesting pod/console-f9d7485db-qptnb container/console namespace/openshift-console: Startup probe status=failure output="Get \"https://10.217.0.14:8443/health\": dial tcp 10.217.0.14:8443: connect: connection refused" start-of-body= 
Jan 27 07:54:06 crc kubenswrapper[4787]: I0127 07:54:06.749483 4787 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-console/console-f9d7485db-qptnb" podUID="b070600e-8a6f-4bb9-a1c2-e763f55d90eb" containerName="console" probeResult="failure" output="Get \"https://10.217.0.14:8443/health\": dial tcp 10.217.0.14:8443: connect: connection refused" Jan 27 07:54:06 crc kubenswrapper[4787]: I0127 07:54:06.761526 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nkvcd\" (UniqueName: \"kubernetes.io/projected/624e9a13-c9c5-4ef3-8628-056bfc65338b-kube-api-access-nkvcd\") pod \"redhat-marketplace-ht6h9\" (UID: \"624e9a13-c9c5-4ef3-8628-056bfc65338b\") " pod="openshift-marketplace/redhat-marketplace-ht6h9" Jan 27 07:54:06 crc kubenswrapper[4787]: I0127 07:54:06.801055 4787 patch_prober.go:28] interesting pod/downloads-7954f5f757-xwx4w container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.15:8080/\": dial tcp 10.217.0.15:8080: connect: connection refused" start-of-body= Jan 27 07:54:06 crc kubenswrapper[4787]: I0127 07:54:06.801126 4787 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-xwx4w" podUID="301e3f1a-19c5-47a7-b85d-81676098f971" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.15:8080/\": dial tcp 10.217.0.15:8080: connect: connection refused" Jan 27 07:54:06 crc kubenswrapper[4787]: I0127 07:54:06.801167 4787 patch_prober.go:28] interesting pod/downloads-7954f5f757-xwx4w container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.15:8080/\": dial tcp 10.217.0.15:8080: connect: connection refused" start-of-body= Jan 27 07:54:06 crc kubenswrapper[4787]: I0127 07:54:06.801248 4787 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-xwx4w" podUID="301e3f1a-19c5-47a7-b85d-81676098f971" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.15:8080/\": dial tcp 10.217.0.15:8080: connect: connection refused" Jan 27 07:54:06 crc kubenswrapper[4787]: I0127 07:54:06.825168 4787 generic.go:334] "Generic (PLEG): container finished" podID="7af45749-cd7f-490a-b2f6-bf979ea6467e" containerID="ae2a0db24a496532988d8c9c232f3f25c682b2d988bff4fb3dec2a81481a0733" exitCode=0 Jan 27 07:54:06 crc kubenswrapper[4787]: I0127 07:54:06.826172 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-npc57" event={"ID":"7af45749-cd7f-490a-b2f6-bf979ea6467e","Type":"ContainerDied","Data":"ae2a0db24a496532988d8c9c232f3f25c682b2d988bff4fb3dec2a81481a0733"} Jan 27 07:54:06 crc kubenswrapper[4787]: I0127 07:54:06.828672 4787 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Jan 27 07:54:06 crc kubenswrapper[4787]: I0127 07:54:06.831926 4787 generic.go:334] "Generic (PLEG): container finished" podID="0368ea79-94a8-42e3-8986-dddbec83d755" containerID="da0792067324438fea958c400a5fb011057b3895c2b4ec11d5379beb14d61d0d" exitCode=0 Jan 27 07:54:06 crc kubenswrapper[4787]: I0127 07:54:06.832011 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-cgwgp" event={"ID":"0368ea79-94a8-42e3-8986-dddbec83d755","Type":"ContainerDied","Data":"da0792067324438fea958c400a5fb011057b3895c2b4ec11d5379beb14d61d0d"} Jan 27 07:54:06 crc kubenswrapper[4787]: I0127 07:54:06.832060 4787 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-cgwgp" event={"ID":"0368ea79-94a8-42e3-8986-dddbec83d755","Type":"ContainerStarted","Data":"7a180c063d8433886b9b99fe8382a8d973c8b6b4d1425416261139533b679d98"} Jan 27 07:54:06 crc kubenswrapper[4787]: I0127 07:54:06.835668 4787 generic.go:334] "Generic (PLEG): container finished" podID="152af819-6586-4345-b2ef-cb8ad845a6b1" containerID="4bba20fc10f4125f9489dd4dcdc6c569bd57f97297dffe6c9dba0e5f759c10ea" exitCode=0 Jan 27 07:54:06 crc kubenswrapper[4787]: I0127 07:54:06.835763 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29491665-p955p" event={"ID":"152af819-6586-4345-b2ef-cb8ad845a6b1","Type":"ContainerDied","Data":"4bba20fc10f4125f9489dd4dcdc6c569bd57f97297dffe6c9dba0e5f759c10ea"} Jan 27 07:54:06 crc kubenswrapper[4787]: I0127 07:54:06.838389 4787 generic.go:334] "Generic (PLEG): container finished" podID="44cc5af2-0636-4589-a988-e7e32bfea075" containerID="5ca52b27fd7e43c704f8e05cf0d0277c2c4f718ac46a1e5031c9baa689471895" exitCode=0 Jan 27 07:54:06 crc kubenswrapper[4787]: I0127 07:54:06.838502 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-tnxzl" event={"ID":"44cc5af2-0636-4589-a988-e7e32bfea075","Type":"ContainerDied","Data":"5ca52b27fd7e43c704f8e05cf0d0277c2c4f718ac46a1e5031c9baa689471895"} Jan 27 07:54:06 crc kubenswrapper[4787]: I0127 07:54:06.838526 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-tnxzl" event={"ID":"44cc5af2-0636-4589-a988-e7e32bfea075","Type":"ContainerStarted","Data":"81b4d3c6ee8e93f24d5baa8dea52cfb0d85fded6205579eedde915fa9ac03cb2"} Jan 27 07:54:06 crc kubenswrapper[4787]: I0127 07:54:06.843823 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-ht6h9" Jan 27 07:54:06 crc kubenswrapper[4787]: I0127 07:54:06.847581 4787 generic.go:334] "Generic (PLEG): container finished" podID="2a1ecf2a-6b91-48d7-873e-918dd4e045fc" containerID="01d251eebd7c2252e3ae9b7ab01cffa2fc304155551715934787979ebd43ab3e" exitCode=0 Jan 27 07:54:06 crc kubenswrapper[4787]: I0127 07:54:06.847763 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-672cg" event={"ID":"2a1ecf2a-6b91-48d7-873e-918dd4e045fc","Type":"ContainerDied","Data":"01d251eebd7c2252e3ae9b7ab01cffa2fc304155551715934787979ebd43ab3e"} Jan 27 07:54:06 crc kubenswrapper[4787]: I0127 07:54:06.847796 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-672cg" event={"ID":"2a1ecf2a-6b91-48d7-873e-918dd4e045fc","Type":"ContainerStarted","Data":"b8aaeac82942157bbd1bdc8b6aa3df78bb8abe8a0a411a20aacb7836a96b1339"} Jan 27 07:54:06 crc kubenswrapper[4787]: I0127 07:54:06.856411 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-d44bj" Jan 27 07:54:06 crc kubenswrapper[4787]: I0127 07:54:06.897496 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Jan 27 07:54:06 crc kubenswrapper[4787]: I0127 07:54:06.917988 4787 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-hz9l2"] Jan 27 07:54:06 crc kubenswrapper[4787]: I0127 07:54:06.920391 4787 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-hz9l2" Jan 27 07:54:06 crc kubenswrapper[4787]: I0127 07:54:06.934659 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-hz9l2"] Jan 27 07:54:06 crc kubenswrapper[4787]: I0127 07:54:06.968102 4787 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-apiserver/apiserver-76f77b778f-zkx8b" Jan 27 07:54:06 crc kubenswrapper[4787]: I0127 07:54:06.968134 4787 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-apiserver/apiserver-76f77b778f-zkx8b" Jan 27 07:54:06 crc kubenswrapper[4787]: I0127 07:54:06.995393 4787 patch_prober.go:28] interesting pod/apiserver-76f77b778f-zkx8b container/openshift-apiserver namespace/openshift-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[+]ping ok Jan 27 07:54:06 crc kubenswrapper[4787]: [+]log ok Jan 27 07:54:06 crc kubenswrapper[4787]: [+]etcd ok Jan 27 07:54:06 crc kubenswrapper[4787]: [+]poststarthook/start-apiserver-admission-initializer ok Jan 27 07:54:06 crc kubenswrapper[4787]: [+]poststarthook/generic-apiserver-start-informers ok Jan 27 07:54:06 crc kubenswrapper[4787]: [+]poststarthook/max-in-flight-filter ok Jan 27 07:54:06 crc kubenswrapper[4787]: [+]poststarthook/storage-object-count-tracker-hook ok Jan 27 07:54:06 crc kubenswrapper[4787]: [+]poststarthook/image.openshift.io-apiserver-caches ok Jan 27 07:54:06 crc kubenswrapper[4787]: [-]poststarthook/authorization.openshift.io-bootstrapclusterroles failed: reason withheld Jan 27 07:54:06 crc kubenswrapper[4787]: [-]poststarthook/authorization.openshift.io-ensurenodebootstrap-sa failed: reason withheld Jan 27 07:54:06 crc kubenswrapper[4787]: [+]poststarthook/project.openshift.io-projectcache ok Jan 27 07:54:06 crc kubenswrapper[4787]: [+]poststarthook/project.openshift.io-projectauthorizationcache ok Jan 27 07:54:06 crc kubenswrapper[4787]: [+]poststarthook/openshift.io-startinformers ok Jan 27 07:54:06 crc kubenswrapper[4787]: [+]poststarthook/openshift.io-restmapperupdater ok Jan 27 07:54:06 crc kubenswrapper[4787]: [+]poststarthook/quota.openshift.io-clusterquotamapping ok Jan 27 07:54:06 crc kubenswrapper[4787]: livez check failed Jan 27 07:54:06 crc kubenswrapper[4787]: I0127 07:54:06.995633 4787 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-apiserver/apiserver-76f77b778f-zkx8b" podUID="ba0745e4-7610-44e2-9a65-cd3875393d64" containerName="openshift-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 27 07:54:07 crc kubenswrapper[4787]: I0127 07:54:07.043298 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/264daa3b-f5d9-407d-8835-35845525329a-utilities\") pod \"redhat-marketplace-hz9l2\" (UID: \"264daa3b-f5d9-407d-8835-35845525329a\") " pod="openshift-marketplace/redhat-marketplace-hz9l2" Jan 27 07:54:07 crc kubenswrapper[4787]: I0127 07:54:07.043925 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-65qxh\" (UniqueName: \"kubernetes.io/projected/264daa3b-f5d9-407d-8835-35845525329a-kube-api-access-65qxh\") pod \"redhat-marketplace-hz9l2\" (UID: \"264daa3b-f5d9-407d-8835-35845525329a\") " pod="openshift-marketplace/redhat-marketplace-hz9l2" Jan 27 07:54:07 crc kubenswrapper[4787]: I0127 07:54:07.043982 4787 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/264daa3b-f5d9-407d-8835-35845525329a-catalog-content\") pod \"redhat-marketplace-hz9l2\" (UID: \"264daa3b-f5d9-407d-8835-35845525329a\") " pod="openshift-marketplace/redhat-marketplace-hz9l2" Jan 27 07:54:07 crc kubenswrapper[4787]: I0127 07:54:07.097185 4787 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8f668bae-612b-4b75-9490-919e737c6a3b" path="/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes" Jan 27 07:54:07 crc kubenswrapper[4787]: I0127 07:54:07.110873 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-ht6h9"] Jan 27 07:54:07 crc kubenswrapper[4787]: I0127 07:54:07.146433 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/264daa3b-f5d9-407d-8835-35845525329a-utilities\") pod \"redhat-marketplace-hz9l2\" (UID: \"264daa3b-f5d9-407d-8835-35845525329a\") " pod="openshift-marketplace/redhat-marketplace-hz9l2" Jan 27 07:54:07 crc kubenswrapper[4787]: I0127 07:54:07.146536 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-65qxh\" (UniqueName: \"kubernetes.io/projected/264daa3b-f5d9-407d-8835-35845525329a-kube-api-access-65qxh\") pod \"redhat-marketplace-hz9l2\" (UID: \"264daa3b-f5d9-407d-8835-35845525329a\") " pod="openshift-marketplace/redhat-marketplace-hz9l2" Jan 27 07:54:07 crc kubenswrapper[4787]: I0127 07:54:07.146578 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/264daa3b-f5d9-407d-8835-35845525329a-catalog-content\") pod \"redhat-marketplace-hz9l2\" (UID: \"264daa3b-f5d9-407d-8835-35845525329a\") " pod="openshift-marketplace/redhat-marketplace-hz9l2" Jan 27 07:54:07 crc kubenswrapper[4787]: I0127 07:54:07.147135 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/264daa3b-f5d9-407d-8835-35845525329a-catalog-content\") pod \"redhat-marketplace-hz9l2\" (UID: \"264daa3b-f5d9-407d-8835-35845525329a\") " pod="openshift-marketplace/redhat-marketplace-hz9l2" Jan 27 07:54:07 crc kubenswrapper[4787]: I0127 07:54:07.147406 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/264daa3b-f5d9-407d-8835-35845525329a-utilities\") pod \"redhat-marketplace-hz9l2\" (UID: \"264daa3b-f5d9-407d-8835-35845525329a\") " pod="openshift-marketplace/redhat-marketplace-hz9l2" Jan 27 07:54:07 crc kubenswrapper[4787]: I0127 07:54:07.168488 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-d44bj"] Jan 27 07:54:07 crc kubenswrapper[4787]: I0127 07:54:07.174214 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-65qxh\" (UniqueName: \"kubernetes.io/projected/264daa3b-f5d9-407d-8835-35845525329a-kube-api-access-65qxh\") pod \"redhat-marketplace-hz9l2\" (UID: \"264daa3b-f5d9-407d-8835-35845525329a\") " pod="openshift-marketplace/redhat-marketplace-hz9l2" Jan 27 07:54:07 crc kubenswrapper[4787]: I0127 07:54:07.273907 4787 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-hz9l2" Jan 27 07:54:07 crc kubenswrapper[4787]: I0127 07:54:07.507886 4787 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-kkjnl"] Jan 27 07:54:07 crc kubenswrapper[4787]: I0127 07:54:07.511923 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-kkjnl" Jan 27 07:54:07 crc kubenswrapper[4787]: I0127 07:54:07.517366 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Jan 27 07:54:07 crc kubenswrapper[4787]: I0127 07:54:07.535411 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-kkjnl"] Jan 27 07:54:07 crc kubenswrapper[4787]: I0127 07:54:07.647048 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-hz9l2"] Jan 27 07:54:07 crc kubenswrapper[4787]: I0127 07:54:07.653467 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1e42281e-20ed-4105-866f-878ffbf6c6eb-utilities\") pod \"redhat-operators-kkjnl\" (UID: \"1e42281e-20ed-4105-866f-878ffbf6c6eb\") " pod="openshift-marketplace/redhat-operators-kkjnl" Jan 27 07:54:07 crc kubenswrapper[4787]: I0127 07:54:07.653881 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-76lt5\" (UniqueName: \"kubernetes.io/projected/1e42281e-20ed-4105-866f-878ffbf6c6eb-kube-api-access-76lt5\") pod \"redhat-operators-kkjnl\" (UID: \"1e42281e-20ed-4105-866f-878ffbf6c6eb\") " pod="openshift-marketplace/redhat-operators-kkjnl" Jan 27 07:54:07 crc kubenswrapper[4787]: I0127 07:54:07.653937 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1e42281e-20ed-4105-866f-878ffbf6c6eb-catalog-content\") pod \"redhat-operators-kkjnl\" (UID: \"1e42281e-20ed-4105-866f-878ffbf6c6eb\") " pod="openshift-marketplace/redhat-operators-kkjnl" Jan 27 07:54:07 crc kubenswrapper[4787]: I0127 07:54:07.713487 4787 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ingress/router-default-5444994796-jzdds" Jan 27 07:54:07 crc kubenswrapper[4787]: I0127 07:54:07.713533 4787 patch_prober.go:28] interesting pod/router-default-5444994796-jzdds container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 27 07:54:07 crc kubenswrapper[4787]: [-]has-synced failed: reason withheld Jan 27 07:54:07 crc kubenswrapper[4787]: [+]process-running ok Jan 27 07:54:07 crc kubenswrapper[4787]: healthz check failed Jan 27 07:54:07 crc kubenswrapper[4787]: I0127 07:54:07.714210 4787 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-jzdds" podUID="5f4a2300-d512-490d-a876-6ad03f0c2f31" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 27 07:54:07 crc kubenswrapper[4787]: I0127 07:54:07.755988 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1e42281e-20ed-4105-866f-878ffbf6c6eb-utilities\") pod \"redhat-operators-kkjnl\" (UID: \"1e42281e-20ed-4105-866f-878ffbf6c6eb\") " 
pod="openshift-marketplace/redhat-operators-kkjnl" Jan 27 07:54:07 crc kubenswrapper[4787]: I0127 07:54:07.756140 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-76lt5\" (UniqueName: \"kubernetes.io/projected/1e42281e-20ed-4105-866f-878ffbf6c6eb-kube-api-access-76lt5\") pod \"redhat-operators-kkjnl\" (UID: \"1e42281e-20ed-4105-866f-878ffbf6c6eb\") " pod="openshift-marketplace/redhat-operators-kkjnl" Jan 27 07:54:07 crc kubenswrapper[4787]: I0127 07:54:07.756215 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1e42281e-20ed-4105-866f-878ffbf6c6eb-catalog-content\") pod \"redhat-operators-kkjnl\" (UID: \"1e42281e-20ed-4105-866f-878ffbf6c6eb\") " pod="openshift-marketplace/redhat-operators-kkjnl" Jan 27 07:54:07 crc kubenswrapper[4787]: I0127 07:54:07.756751 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1e42281e-20ed-4105-866f-878ffbf6c6eb-utilities\") pod \"redhat-operators-kkjnl\" (UID: \"1e42281e-20ed-4105-866f-878ffbf6c6eb\") " pod="openshift-marketplace/redhat-operators-kkjnl" Jan 27 07:54:07 crc kubenswrapper[4787]: I0127 07:54:07.756909 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1e42281e-20ed-4105-866f-878ffbf6c6eb-catalog-content\") pod \"redhat-operators-kkjnl\" (UID: \"1e42281e-20ed-4105-866f-878ffbf6c6eb\") " pod="openshift-marketplace/redhat-operators-kkjnl" Jan 27 07:54:07 crc kubenswrapper[4787]: I0127 07:54:07.784211 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-76lt5\" (UniqueName: \"kubernetes.io/projected/1e42281e-20ed-4105-866f-878ffbf6c6eb-kube-api-access-76lt5\") pod \"redhat-operators-kkjnl\" (UID: \"1e42281e-20ed-4105-866f-878ffbf6c6eb\") " pod="openshift-marketplace/redhat-operators-kkjnl" Jan 27 07:54:07 crc kubenswrapper[4787]: I0127 07:54:07.840307 4787 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-kkjnl" Jan 27 07:54:07 crc kubenswrapper[4787]: I0127 07:54:07.862775 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-ht6h9" event={"ID":"624e9a13-c9c5-4ef3-8628-056bfc65338b","Type":"ContainerDied","Data":"16105c08221c0a2bad9341270bf79d1c85ac086baa9a0934ed9322b6ec8a995e"} Jan 27 07:54:07 crc kubenswrapper[4787]: I0127 07:54:07.862860 4787 generic.go:334] "Generic (PLEG): container finished" podID="624e9a13-c9c5-4ef3-8628-056bfc65338b" containerID="16105c08221c0a2bad9341270bf79d1c85ac086baa9a0934ed9322b6ec8a995e" exitCode=0 Jan 27 07:54:07 crc kubenswrapper[4787]: I0127 07:54:07.863196 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-ht6h9" event={"ID":"624e9a13-c9c5-4ef3-8628-056bfc65338b","Type":"ContainerStarted","Data":"c2980947d8538d4e3d4cb9e62697c884d7b7e8b6d4185152424643323c016212"} Jan 27 07:54:07 crc kubenswrapper[4787]: I0127 07:54:07.873073 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-hz9l2" event={"ID":"264daa3b-f5d9-407d-8835-35845525329a","Type":"ContainerStarted","Data":"34b029af2088374d8be804c9373b55e19265a67e5ebece55f9fd4a6ba2ab1d08"} Jan 27 07:54:07 crc kubenswrapper[4787]: I0127 07:54:07.875626 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"38edf9c9-43b3-4895-ac38-d33e35dd84ff","Type":"ContainerStarted","Data":"f7112a8124ac99640f54d3193f30b49640a3e16a7e1b6909eba69d818ec9cb7b"} Jan 27 07:54:07 crc kubenswrapper[4787]: I0127 07:54:07.875657 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"38edf9c9-43b3-4895-ac38-d33e35dd84ff","Type":"ContainerStarted","Data":"b76d9b5d789e59b56dccfabe215ff4d7bfa2fe6c0b0475134d18f24caf01b1e1"} Jan 27 07:54:07 crc kubenswrapper[4787]: I0127 07:54:07.891141 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-d44bj" event={"ID":"5bd6837c-65d0-40c3-9cbc-d05d4f1e93d0","Type":"ContainerStarted","Data":"661df021152bc7f98a651ccf560f2bb03e44a6e4242d1b20ac64c7c4a614aaf1"} Jan 27 07:54:07 crc kubenswrapper[4787]: I0127 07:54:07.891199 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-d44bj" event={"ID":"5bd6837c-65d0-40c3-9cbc-d05d4f1e93d0","Type":"ContainerStarted","Data":"f164a16895fc4826be32f420da3768fd0f293a4a904dce0ed637757e1fbcf513"} Jan 27 07:54:07 crc kubenswrapper[4787]: I0127 07:54:07.902163 4787 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-qjd6t"] Jan 27 07:54:07 crc kubenswrapper[4787]: I0127 07:54:07.903362 4787 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-qjd6t" Jan 27 07:54:07 crc kubenswrapper[4787]: I0127 07:54:07.928004 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-qjd6t"] Jan 27 07:54:07 crc kubenswrapper[4787]: I0127 07:54:07.930642 4787 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager/revision-pruner-9-crc" podStartSLOduration=1.9306204500000002 podStartE2EDuration="1.93062045s" podCreationTimestamp="2026-01-27 07:54:06 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-27 07:54:07.924472911 +0000 UTC m=+153.576828413" watchObservedRunningTime="2026-01-27 07:54:07.93062045 +0000 UTC m=+153.582975942" Jan 27 07:54:07 crc kubenswrapper[4787]: I0127 07:54:07.951923 4787 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-697d97f7c8-d44bj" podStartSLOduration=128.951897842 podStartE2EDuration="2m8.951897842s" podCreationTimestamp="2026-01-27 07:51:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-27 07:54:07.949869026 +0000 UTC m=+153.602224528" watchObservedRunningTime="2026-01-27 07:54:07.951897842 +0000 UTC m=+153.604253334" Jan 27 07:54:08 crc kubenswrapper[4787]: I0127 07:54:08.073104 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5f5e46e1-1252-4a0a-ad11-8d4236f3759b-catalog-content\") pod \"redhat-operators-qjd6t\" (UID: \"5f5e46e1-1252-4a0a-ad11-8d4236f3759b\") " pod="openshift-marketplace/redhat-operators-qjd6t" Jan 27 07:54:08 crc kubenswrapper[4787]: I0127 07:54:08.076061 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5f5e46e1-1252-4a0a-ad11-8d4236f3759b-utilities\") pod \"redhat-operators-qjd6t\" (UID: \"5f5e46e1-1252-4a0a-ad11-8d4236f3759b\") " pod="openshift-marketplace/redhat-operators-qjd6t" Jan 27 07:54:08 crc kubenswrapper[4787]: I0127 07:54:08.076132 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8dfzs\" (UniqueName: \"kubernetes.io/projected/5f5e46e1-1252-4a0a-ad11-8d4236f3759b-kube-api-access-8dfzs\") pod \"redhat-operators-qjd6t\" (UID: \"5f5e46e1-1252-4a0a-ad11-8d4236f3759b\") " pod="openshift-marketplace/redhat-operators-qjd6t" Jan 27 07:54:08 crc kubenswrapper[4787]: I0127 07:54:08.183892 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5f5e46e1-1252-4a0a-ad11-8d4236f3759b-utilities\") pod \"redhat-operators-qjd6t\" (UID: \"5f5e46e1-1252-4a0a-ad11-8d4236f3759b\") " pod="openshift-marketplace/redhat-operators-qjd6t" Jan 27 07:54:08 crc kubenswrapper[4787]: I0127 07:54:08.183964 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8dfzs\" (UniqueName: \"kubernetes.io/projected/5f5e46e1-1252-4a0a-ad11-8d4236f3759b-kube-api-access-8dfzs\") pod \"redhat-operators-qjd6t\" (UID: \"5f5e46e1-1252-4a0a-ad11-8d4236f3759b\") " pod="openshift-marketplace/redhat-operators-qjd6t" Jan 27 07:54:08 crc kubenswrapper[4787]: I0127 07:54:08.184221 4787 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5f5e46e1-1252-4a0a-ad11-8d4236f3759b-catalog-content\") pod \"redhat-operators-qjd6t\" (UID: \"5f5e46e1-1252-4a0a-ad11-8d4236f3759b\") " pod="openshift-marketplace/redhat-operators-qjd6t" Jan 27 07:54:08 crc kubenswrapper[4787]: I0127 07:54:08.186028 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5f5e46e1-1252-4a0a-ad11-8d4236f3759b-catalog-content\") pod \"redhat-operators-qjd6t\" (UID: \"5f5e46e1-1252-4a0a-ad11-8d4236f3759b\") " pod="openshift-marketplace/redhat-operators-qjd6t" Jan 27 07:54:08 crc kubenswrapper[4787]: I0127 07:54:08.200973 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5f5e46e1-1252-4a0a-ad11-8d4236f3759b-utilities\") pod \"redhat-operators-qjd6t\" (UID: \"5f5e46e1-1252-4a0a-ad11-8d4236f3759b\") " pod="openshift-marketplace/redhat-operators-qjd6t" Jan 27 07:54:08 crc kubenswrapper[4787]: I0127 07:54:08.213063 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8dfzs\" (UniqueName: \"kubernetes.io/projected/5f5e46e1-1252-4a0a-ad11-8d4236f3759b-kube-api-access-8dfzs\") pod \"redhat-operators-qjd6t\" (UID: \"5f5e46e1-1252-4a0a-ad11-8d4236f3759b\") " pod="openshift-marketplace/redhat-operators-qjd6t" Jan 27 07:54:08 crc kubenswrapper[4787]: I0127 07:54:08.214603 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-kkjnl"] Jan 27 07:54:08 crc kubenswrapper[4787]: I0127 07:54:08.231415 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-qjd6t" Jan 27 07:54:08 crc kubenswrapper[4787]: I0127 07:54:08.317534 4787 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29491665-p955p" Jan 27 07:54:08 crc kubenswrapper[4787]: I0127 07:54:08.387361 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9r6l9\" (UniqueName: \"kubernetes.io/projected/152af819-6586-4345-b2ef-cb8ad845a6b1-kube-api-access-9r6l9\") pod \"152af819-6586-4345-b2ef-cb8ad845a6b1\" (UID: \"152af819-6586-4345-b2ef-cb8ad845a6b1\") " Jan 27 07:54:08 crc kubenswrapper[4787]: I0127 07:54:08.387571 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/152af819-6586-4345-b2ef-cb8ad845a6b1-config-volume\") pod \"152af819-6586-4345-b2ef-cb8ad845a6b1\" (UID: \"152af819-6586-4345-b2ef-cb8ad845a6b1\") " Jan 27 07:54:08 crc kubenswrapper[4787]: I0127 07:54:08.387648 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/152af819-6586-4345-b2ef-cb8ad845a6b1-secret-volume\") pod \"152af819-6586-4345-b2ef-cb8ad845a6b1\" (UID: \"152af819-6586-4345-b2ef-cb8ad845a6b1\") " Jan 27 07:54:08 crc kubenswrapper[4787]: I0127 07:54:08.388444 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/152af819-6586-4345-b2ef-cb8ad845a6b1-config-volume" (OuterVolumeSpecName: "config-volume") pod "152af819-6586-4345-b2ef-cb8ad845a6b1" (UID: "152af819-6586-4345-b2ef-cb8ad845a6b1"). InnerVolumeSpecName "config-volume". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 27 07:54:08 crc kubenswrapper[4787]: I0127 07:54:08.392935 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/152af819-6586-4345-b2ef-cb8ad845a6b1-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "152af819-6586-4345-b2ef-cb8ad845a6b1" (UID: "152af819-6586-4345-b2ef-cb8ad845a6b1"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 27 07:54:08 crc kubenswrapper[4787]: I0127 07:54:08.398994 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/152af819-6586-4345-b2ef-cb8ad845a6b1-kube-api-access-9r6l9" (OuterVolumeSpecName: "kube-api-access-9r6l9") pod "152af819-6586-4345-b2ef-cb8ad845a6b1" (UID: "152af819-6586-4345-b2ef-cb8ad845a6b1"). InnerVolumeSpecName "kube-api-access-9r6l9". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 27 07:54:08 crc kubenswrapper[4787]: I0127 07:54:08.489352 4787 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/152af819-6586-4345-b2ef-cb8ad845a6b1-secret-volume\") on node \"crc\" DevicePath \"\"" Jan 27 07:54:08 crc kubenswrapper[4787]: I0127 07:54:08.489927 4787 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9r6l9\" (UniqueName: \"kubernetes.io/projected/152af819-6586-4345-b2ef-cb8ad845a6b1-kube-api-access-9r6l9\") on node \"crc\" DevicePath \"\"" Jan 27 07:54:08 crc kubenswrapper[4787]: I0127 07:54:08.490017 4787 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/152af819-6586-4345-b2ef-cb8ad845a6b1-config-volume\") on node \"crc\" DevicePath \"\"" Jan 27 07:54:08 crc kubenswrapper[4787]: I0127 07:54:08.508109 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-qjd6t"] Jan 27 07:54:08 crc kubenswrapper[4787]: W0127 07:54:08.519153 4787 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5f5e46e1_1252_4a0a_ad11_8d4236f3759b.slice/crio-a7fc73f366dad2ee2177e45cec1c07c9411e1ae51b5099acd00178407f132bd0 WatchSource:0}: Error finding container a7fc73f366dad2ee2177e45cec1c07c9411e1ae51b5099acd00178407f132bd0: Status 404 returned error can't find the container with id a7fc73f366dad2ee2177e45cec1c07c9411e1ae51b5099acd00178407f132bd0 Jan 27 07:54:08 crc kubenswrapper[4787]: I0127 07:54:08.722211 4787 patch_prober.go:28] interesting pod/router-default-5444994796-jzdds container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 27 07:54:08 crc kubenswrapper[4787]: [-]has-synced failed: reason withheld Jan 27 07:54:08 crc kubenswrapper[4787]: [+]process-running ok Jan 27 07:54:08 crc kubenswrapper[4787]: healthz check failed Jan 27 07:54:08 crc kubenswrapper[4787]: I0127 07:54:08.722302 4787 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-jzdds" podUID="5f4a2300-d512-490d-a876-6ad03f0c2f31" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 27 07:54:08 crc kubenswrapper[4787]: I0127 07:54:08.929006 4787 generic.go:334] "Generic (PLEG): container finished" podID="1e42281e-20ed-4105-866f-878ffbf6c6eb" containerID="71a0fd62f051e1733036586c55bfd4c513668f6b2af829fed34f76f2fd16d4a2" 
exitCode=0 Jan 27 07:54:08 crc kubenswrapper[4787]: I0127 07:54:08.929377 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-kkjnl" event={"ID":"1e42281e-20ed-4105-866f-878ffbf6c6eb","Type":"ContainerDied","Data":"71a0fd62f051e1733036586c55bfd4c513668f6b2af829fed34f76f2fd16d4a2"} Jan 27 07:54:08 crc kubenswrapper[4787]: I0127 07:54:08.929490 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-kkjnl" event={"ID":"1e42281e-20ed-4105-866f-878ffbf6c6eb","Type":"ContainerStarted","Data":"69a14500beb72f949103663a0b1a2f611ef846aca8d992b4feec769e4103a913"} Jan 27 07:54:08 crc kubenswrapper[4787]: I0127 07:54:08.940058 4787 generic.go:334] "Generic (PLEG): container finished" podID="38edf9c9-43b3-4895-ac38-d33e35dd84ff" containerID="f7112a8124ac99640f54d3193f30b49640a3e16a7e1b6909eba69d818ec9cb7b" exitCode=0 Jan 27 07:54:08 crc kubenswrapper[4787]: I0127 07:54:08.940231 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"38edf9c9-43b3-4895-ac38-d33e35dd84ff","Type":"ContainerDied","Data":"f7112a8124ac99640f54d3193f30b49640a3e16a7e1b6909eba69d818ec9cb7b"} Jan 27 07:54:08 crc kubenswrapper[4787]: I0127 07:54:08.952057 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29491665-p955p" event={"ID":"152af819-6586-4345-b2ef-cb8ad845a6b1","Type":"ContainerDied","Data":"dd1d962cbba22622cc8f00742deb5445d3f8c1e1a6d321d913c3f39997ffb3b5"} Jan 27 07:54:08 crc kubenswrapper[4787]: I0127 07:54:08.952127 4787 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="dd1d962cbba22622cc8f00742deb5445d3f8c1e1a6d321d913c3f39997ffb3b5" Jan 27 07:54:08 crc kubenswrapper[4787]: I0127 07:54:08.952084 4787 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29491665-p955p" Jan 27 07:54:08 crc kubenswrapper[4787]: I0127 07:54:08.959323 4787 generic.go:334] "Generic (PLEG): container finished" podID="264daa3b-f5d9-407d-8835-35845525329a" containerID="fc80e896d018112629afc7413a0e19d17cf029077e3a9fbb67d67c7d973bf040" exitCode=0 Jan 27 07:54:08 crc kubenswrapper[4787]: I0127 07:54:08.959426 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-hz9l2" event={"ID":"264daa3b-f5d9-407d-8835-35845525329a","Type":"ContainerDied","Data":"fc80e896d018112629afc7413a0e19d17cf029077e3a9fbb67d67c7d973bf040"} Jan 27 07:54:08 crc kubenswrapper[4787]: I0127 07:54:08.972374 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-qjd6t" event={"ID":"5f5e46e1-1252-4a0a-ad11-8d4236f3759b","Type":"ContainerStarted","Data":"a7fc73f366dad2ee2177e45cec1c07c9411e1ae51b5099acd00178407f132bd0"} Jan 27 07:54:08 crc kubenswrapper[4787]: I0127 07:54:08.972426 4787 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-697d97f7c8-d44bj" Jan 27 07:54:09 crc kubenswrapper[4787]: I0127 07:54:09.713332 4787 patch_prober.go:28] interesting pod/router-default-5444994796-jzdds container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 27 07:54:09 crc kubenswrapper[4787]: [-]has-synced failed: reason withheld Jan 27 07:54:09 crc kubenswrapper[4787]: [+]process-running ok Jan 27 07:54:09 crc kubenswrapper[4787]: healthz check failed Jan 27 07:54:09 crc kubenswrapper[4787]: I0127 07:54:09.713833 4787 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-jzdds" podUID="5f4a2300-d512-490d-a876-6ad03f0c2f31" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 27 07:54:10 crc kubenswrapper[4787]: I0127 07:54:10.006475 4787 generic.go:334] "Generic (PLEG): container finished" podID="5f5e46e1-1252-4a0a-ad11-8d4236f3759b" containerID="e485664fbc4a751f0a85db31317555b712de9d281e0fb607d7c774fccc36329f" exitCode=0 Jan 27 07:54:10 crc kubenswrapper[4787]: I0127 07:54:10.008840 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-qjd6t" event={"ID":"5f5e46e1-1252-4a0a-ad11-8d4236f3759b","Type":"ContainerDied","Data":"e485664fbc4a751f0a85db31317555b712de9d281e0fb607d7c774fccc36329f"} Jan 27 07:54:10 crc kubenswrapper[4787]: I0127 07:54:10.461356 4787 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Jan 27 07:54:10 crc kubenswrapper[4787]: I0127 07:54:10.530274 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/38edf9c9-43b3-4895-ac38-d33e35dd84ff-kubelet-dir\") pod \"38edf9c9-43b3-4895-ac38-d33e35dd84ff\" (UID: \"38edf9c9-43b3-4895-ac38-d33e35dd84ff\") " Jan 27 07:54:10 crc kubenswrapper[4787]: I0127 07:54:10.530467 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/38edf9c9-43b3-4895-ac38-d33e35dd84ff-kube-api-access\") pod \"38edf9c9-43b3-4895-ac38-d33e35dd84ff\" (UID: \"38edf9c9-43b3-4895-ac38-d33e35dd84ff\") " Jan 27 07:54:10 crc kubenswrapper[4787]: I0127 07:54:10.531066 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/38edf9c9-43b3-4895-ac38-d33e35dd84ff-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "38edf9c9-43b3-4895-ac38-d33e35dd84ff" (UID: "38edf9c9-43b3-4895-ac38-d33e35dd84ff"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 27 07:54:10 crc kubenswrapper[4787]: I0127 07:54:10.596644 4787 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Jan 27 07:54:10 crc kubenswrapper[4787]: E0127 07:54:10.597145 4787 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="152af819-6586-4345-b2ef-cb8ad845a6b1" containerName="collect-profiles" Jan 27 07:54:10 crc kubenswrapper[4787]: I0127 07:54:10.597161 4787 state_mem.go:107] "Deleted CPUSet assignment" podUID="152af819-6586-4345-b2ef-cb8ad845a6b1" containerName="collect-profiles" Jan 27 07:54:10 crc kubenswrapper[4787]: E0127 07:54:10.597189 4787 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="38edf9c9-43b3-4895-ac38-d33e35dd84ff" containerName="pruner" Jan 27 07:54:10 crc kubenswrapper[4787]: I0127 07:54:10.597197 4787 state_mem.go:107] "Deleted CPUSet assignment" podUID="38edf9c9-43b3-4895-ac38-d33e35dd84ff" containerName="pruner" Jan 27 07:54:10 crc kubenswrapper[4787]: I0127 07:54:10.597332 4787 memory_manager.go:354] "RemoveStaleState removing state" podUID="152af819-6586-4345-b2ef-cb8ad845a6b1" containerName="collect-profiles" Jan 27 07:54:10 crc kubenswrapper[4787]: I0127 07:54:10.597349 4787 memory_manager.go:354] "RemoveStaleState removing state" podUID="38edf9c9-43b3-4895-ac38-d33e35dd84ff" containerName="pruner" Jan 27 07:54:10 crc kubenswrapper[4787]: I0127 07:54:10.598022 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Jan 27 07:54:10 crc kubenswrapper[4787]: I0127 07:54:10.598115 4787 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Jan 27 07:54:10 crc kubenswrapper[4787]: I0127 07:54:10.601915 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n" Jan 27 07:54:10 crc kubenswrapper[4787]: I0127 07:54:10.602830 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt" Jan 27 07:54:10 crc kubenswrapper[4787]: I0127 07:54:10.631706 4787 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/38edf9c9-43b3-4895-ac38-d33e35dd84ff-kubelet-dir\") on node \"crc\" DevicePath \"\"" Jan 27 07:54:10 crc kubenswrapper[4787]: I0127 07:54:10.637294 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/38edf9c9-43b3-4895-ac38-d33e35dd84ff-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "38edf9c9-43b3-4895-ac38-d33e35dd84ff" (UID: "38edf9c9-43b3-4895-ac38-d33e35dd84ff"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 27 07:54:10 crc kubenswrapper[4787]: I0127 07:54:10.715165 4787 patch_prober.go:28] interesting pod/router-default-5444994796-jzdds container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 27 07:54:10 crc kubenswrapper[4787]: [-]has-synced failed: reason withheld Jan 27 07:54:10 crc kubenswrapper[4787]: [+]process-running ok Jan 27 07:54:10 crc kubenswrapper[4787]: healthz check failed Jan 27 07:54:10 crc kubenswrapper[4787]: I0127 07:54:10.715235 4787 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-jzdds" podUID="5f4a2300-d512-490d-a876-6ad03f0c2f31" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 27 07:54:10 crc kubenswrapper[4787]: I0127 07:54:10.733716 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/1ad51913-b8b4-46f0-90d1-8334a8ce8ebe-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"1ad51913-b8b4-46f0-90d1-8334a8ce8ebe\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Jan 27 07:54:10 crc kubenswrapper[4787]: I0127 07:54:10.733789 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1ad51913-b8b4-46f0-90d1-8334a8ce8ebe-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"1ad51913-b8b4-46f0-90d1-8334a8ce8ebe\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Jan 27 07:54:10 crc kubenswrapper[4787]: I0127 07:54:10.733928 4787 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/38edf9c9-43b3-4895-ac38-d33e35dd84ff-kube-api-access\") on node \"crc\" DevicePath \"\"" Jan 27 07:54:10 crc kubenswrapper[4787]: I0127 07:54:10.835207 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1ad51913-b8b4-46f0-90d1-8334a8ce8ebe-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"1ad51913-b8b4-46f0-90d1-8334a8ce8ebe\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Jan 27 07:54:10 crc kubenswrapper[4787]: I0127 07:54:10.835336 4787 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/1ad51913-b8b4-46f0-90d1-8334a8ce8ebe-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"1ad51913-b8b4-46f0-90d1-8334a8ce8ebe\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Jan 27 07:54:10 crc kubenswrapper[4787]: I0127 07:54:10.835418 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/1ad51913-b8b4-46f0-90d1-8334a8ce8ebe-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"1ad51913-b8b4-46f0-90d1-8334a8ce8ebe\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Jan 27 07:54:10 crc kubenswrapper[4787]: I0127 07:54:10.854070 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1ad51913-b8b4-46f0-90d1-8334a8ce8ebe-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"1ad51913-b8b4-46f0-90d1-8334a8ce8ebe\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Jan 27 07:54:10 crc kubenswrapper[4787]: I0127 07:54:10.929323 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Jan 27 07:54:11 crc kubenswrapper[4787]: I0127 07:54:11.024508 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"38edf9c9-43b3-4895-ac38-d33e35dd84ff","Type":"ContainerDied","Data":"b76d9b5d789e59b56dccfabe215ff4d7bfa2fe6c0b0475134d18f24caf01b1e1"} Jan 27 07:54:11 crc kubenswrapper[4787]: I0127 07:54:11.024568 4787 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b76d9b5d789e59b56dccfabe215ff4d7bfa2fe6c0b0475134d18f24caf01b1e1" Jan 27 07:54:11 crc kubenswrapper[4787]: I0127 07:54:11.024641 4787 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Jan 27 07:54:11 crc kubenswrapper[4787]: I0127 07:54:11.181070 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Jan 27 07:54:11 crc kubenswrapper[4787]: W0127 07:54:11.187258 4787 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-pod1ad51913_b8b4_46f0_90d1_8334a8ce8ebe.slice/crio-e04d4f313a2745357a590b2ccf3237015c1d625017bfc0adcfd3738e7cba346a WatchSource:0}: Error finding container e04d4f313a2745357a590b2ccf3237015c1d625017bfc0adcfd3738e7cba346a: Status 404 returned error can't find the container with id e04d4f313a2745357a590b2ccf3237015c1d625017bfc0adcfd3738e7cba346a Jan 27 07:54:11 crc kubenswrapper[4787]: I0127 07:54:11.717043 4787 patch_prober.go:28] interesting pod/router-default-5444994796-jzdds container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 27 07:54:11 crc kubenswrapper[4787]: [-]has-synced failed: reason withheld Jan 27 07:54:11 crc kubenswrapper[4787]: [+]process-running ok Jan 27 07:54:11 crc kubenswrapper[4787]: healthz check failed Jan 27 07:54:11 crc kubenswrapper[4787]: I0127 07:54:11.717132 4787 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-jzdds" podUID="5f4a2300-d512-490d-a876-6ad03f0c2f31" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 27 07:54:11 crc kubenswrapper[4787]: I0127 07:54:11.969500 4787 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-apiserver/apiserver-76f77b778f-zkx8b" Jan 27 07:54:11 crc kubenswrapper[4787]: I0127 07:54:11.976986 4787 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-apiserver/apiserver-76f77b778f-zkx8b" Jan 27 07:54:12 crc kubenswrapper[4787]: I0127 07:54:12.074474 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"1ad51913-b8b4-46f0-90d1-8334a8ce8ebe","Type":"ContainerStarted","Data":"efc3a4d788a7f2ef075bd61be09edf228fb114c6649606e1ca368b5477ca776f"} Jan 27 07:54:12 crc kubenswrapper[4787]: I0127 07:54:12.074947 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"1ad51913-b8b4-46f0-90d1-8334a8ce8ebe","Type":"ContainerStarted","Data":"e04d4f313a2745357a590b2ccf3237015c1d625017bfc0adcfd3738e7cba346a"} Jan 27 07:54:12 crc kubenswrapper[4787]: I0127 07:54:12.095370 4787 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/revision-pruner-8-crc" podStartSLOduration=2.095345948 podStartE2EDuration="2.095345948s" podCreationTimestamp="2026-01-27 07:54:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-27 07:54:12.092866056 +0000 UTC m=+157.745221548" watchObservedRunningTime="2026-01-27 07:54:12.095345948 +0000 UTC m=+157.747701440" Jan 27 07:54:12 crc kubenswrapper[4787]: I0127 07:54:12.714250 4787 patch_prober.go:28] interesting pod/router-default-5444994796-jzdds container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 27 07:54:12 crc kubenswrapper[4787]: 
[-]has-synced failed: reason withheld Jan 27 07:54:12 crc kubenswrapper[4787]: [+]process-running ok Jan 27 07:54:12 crc kubenswrapper[4787]: healthz check failed Jan 27 07:54:12 crc kubenswrapper[4787]: I0127 07:54:12.714353 4787 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-jzdds" podUID="5f4a2300-d512-490d-a876-6ad03f0c2f31" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 27 07:54:12 crc kubenswrapper[4787]: I0127 07:54:12.745843 4787 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-dns/dns-default-rkzlx" Jan 27 07:54:13 crc kubenswrapper[4787]: I0127 07:54:13.083478 4787 generic.go:334] "Generic (PLEG): container finished" podID="1ad51913-b8b4-46f0-90d1-8334a8ce8ebe" containerID="efc3a4d788a7f2ef075bd61be09edf228fb114c6649606e1ca368b5477ca776f" exitCode=0 Jan 27 07:54:13 crc kubenswrapper[4787]: I0127 07:54:13.083526 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"1ad51913-b8b4-46f0-90d1-8334a8ce8ebe","Type":"ContainerDied","Data":"efc3a4d788a7f2ef075bd61be09edf228fb114c6649606e1ca368b5477ca776f"} Jan 27 07:54:13 crc kubenswrapper[4787]: I0127 07:54:13.714598 4787 patch_prober.go:28] interesting pod/router-default-5444994796-jzdds container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 27 07:54:13 crc kubenswrapper[4787]: [-]has-synced failed: reason withheld Jan 27 07:54:13 crc kubenswrapper[4787]: [+]process-running ok Jan 27 07:54:13 crc kubenswrapper[4787]: healthz check failed Jan 27 07:54:13 crc kubenswrapper[4787]: I0127 07:54:13.715140 4787 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-jzdds" podUID="5f4a2300-d512-490d-a876-6ad03f0c2f31" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 27 07:54:14 crc kubenswrapper[4787]: I0127 07:54:14.713477 4787 patch_prober.go:28] interesting pod/router-default-5444994796-jzdds container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 27 07:54:14 crc kubenswrapper[4787]: [-]has-synced failed: reason withheld Jan 27 07:54:14 crc kubenswrapper[4787]: [+]process-running ok Jan 27 07:54:14 crc kubenswrapper[4787]: healthz check failed Jan 27 07:54:14 crc kubenswrapper[4787]: I0127 07:54:14.714026 4787 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-jzdds" podUID="5f4a2300-d512-490d-a876-6ad03f0c2f31" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 27 07:54:15 crc kubenswrapper[4787]: I0127 07:54:15.713932 4787 patch_prober.go:28] interesting pod/router-default-5444994796-jzdds container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 27 07:54:15 crc kubenswrapper[4787]: [-]has-synced failed: reason withheld Jan 27 07:54:15 crc kubenswrapper[4787]: [+]process-running ok Jan 27 07:54:15 crc kubenswrapper[4787]: healthz check failed Jan 27 07:54:15 crc kubenswrapper[4787]: I0127 07:54:15.714333 4787 prober.go:107] "Probe failed" probeType="Startup" 
pod="openshift-ingress/router-default-5444994796-jzdds" podUID="5f4a2300-d512-490d-a876-6ad03f0c2f31" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 27 07:54:16 crc kubenswrapper[4787]: I0127 07:54:16.713650 4787 patch_prober.go:28] interesting pod/router-default-5444994796-jzdds container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 27 07:54:16 crc kubenswrapper[4787]: [-]has-synced failed: reason withheld Jan 27 07:54:16 crc kubenswrapper[4787]: [+]process-running ok Jan 27 07:54:16 crc kubenswrapper[4787]: healthz check failed Jan 27 07:54:16 crc kubenswrapper[4787]: I0127 07:54:16.713748 4787 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-jzdds" podUID="5f4a2300-d512-490d-a876-6ad03f0c2f31" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 27 07:54:16 crc kubenswrapper[4787]: I0127 07:54:16.741175 4787 patch_prober.go:28] interesting pod/console-f9d7485db-qptnb container/console namespace/openshift-console: Startup probe status=failure output="Get \"https://10.217.0.14:8443/health\": dial tcp 10.217.0.14:8443: connect: connection refused" start-of-body= Jan 27 07:54:16 crc kubenswrapper[4787]: I0127 07:54:16.741274 4787 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-console/console-f9d7485db-qptnb" podUID="b070600e-8a6f-4bb9-a1c2-e763f55d90eb" containerName="console" probeResult="failure" output="Get \"https://10.217.0.14:8443/health\": dial tcp 10.217.0.14:8443: connect: connection refused" Jan 27 07:54:16 crc kubenswrapper[4787]: I0127 07:54:16.805885 4787 patch_prober.go:28] interesting pod/downloads-7954f5f757-xwx4w container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.15:8080/\": dial tcp 10.217.0.15:8080: connect: connection refused" start-of-body= Jan 27 07:54:16 crc kubenswrapper[4787]: I0127 07:54:16.805985 4787 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-xwx4w" podUID="301e3f1a-19c5-47a7-b85d-81676098f971" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.15:8080/\": dial tcp 10.217.0.15:8080: connect: connection refused" Jan 27 07:54:16 crc kubenswrapper[4787]: I0127 07:54:16.806033 4787 patch_prober.go:28] interesting pod/downloads-7954f5f757-xwx4w container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.15:8080/\": dial tcp 10.217.0.15:8080: connect: connection refused" start-of-body= Jan 27 07:54:16 crc kubenswrapper[4787]: I0127 07:54:16.806103 4787 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-xwx4w" podUID="301e3f1a-19c5-47a7-b85d-81676098f971" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.15:8080/\": dial tcp 10.217.0.15:8080: connect: connection refused" Jan 27 07:54:17 crc kubenswrapper[4787]: I0127 07:54:17.714202 4787 patch_prober.go:28] interesting pod/router-default-5444994796-jzdds container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 27 07:54:17 crc kubenswrapper[4787]: [-]has-synced failed: reason withheld Jan 27 07:54:17 crc kubenswrapper[4787]: 
[+]process-running ok Jan 27 07:54:17 crc kubenswrapper[4787]: healthz check failed Jan 27 07:54:17 crc kubenswrapper[4787]: I0127 07:54:17.714736 4787 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-jzdds" podUID="5f4a2300-d512-490d-a876-6ad03f0c2f31" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 27 07:54:18 crc kubenswrapper[4787]: I0127 07:54:18.713488 4787 patch_prober.go:28] interesting pod/router-default-5444994796-jzdds container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 27 07:54:18 crc kubenswrapper[4787]: [-]has-synced failed: reason withheld Jan 27 07:54:18 crc kubenswrapper[4787]: [+]process-running ok Jan 27 07:54:18 crc kubenswrapper[4787]: healthz check failed Jan 27 07:54:18 crc kubenswrapper[4787]: I0127 07:54:18.713586 4787 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-jzdds" podUID="5f4a2300-d512-490d-a876-6ad03f0c2f31" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 27 07:54:19 crc kubenswrapper[4787]: I0127 07:54:19.712824 4787 patch_prober.go:28] interesting pod/router-default-5444994796-jzdds container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 27 07:54:19 crc kubenswrapper[4787]: [-]has-synced failed: reason withheld Jan 27 07:54:19 crc kubenswrapper[4787]: [+]process-running ok Jan 27 07:54:19 crc kubenswrapper[4787]: healthz check failed Jan 27 07:54:19 crc kubenswrapper[4787]: I0127 07:54:19.712881 4787 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-jzdds" podUID="5f4a2300-d512-490d-a876-6ad03f0c2f31" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 27 07:54:19 crc kubenswrapper[4787]: I0127 07:54:19.910081 4787 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Jan 27 07:54:20 crc kubenswrapper[4787]: I0127 07:54:20.008421 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/1ad51913-b8b4-46f0-90d1-8334a8ce8ebe-kubelet-dir\") pod \"1ad51913-b8b4-46f0-90d1-8334a8ce8ebe\" (UID: \"1ad51913-b8b4-46f0-90d1-8334a8ce8ebe\") " Jan 27 07:54:20 crc kubenswrapper[4787]: I0127 07:54:20.009132 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1ad51913-b8b4-46f0-90d1-8334a8ce8ebe-kube-api-access\") pod \"1ad51913-b8b4-46f0-90d1-8334a8ce8ebe\" (UID: \"1ad51913-b8b4-46f0-90d1-8334a8ce8ebe\") " Jan 27 07:54:20 crc kubenswrapper[4787]: I0127 07:54:20.008882 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/1ad51913-b8b4-46f0-90d1-8334a8ce8ebe-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "1ad51913-b8b4-46f0-90d1-8334a8ce8ebe" (UID: "1ad51913-b8b4-46f0-90d1-8334a8ce8ebe"). InnerVolumeSpecName "kubelet-dir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 27 07:54:20 crc kubenswrapper[4787]: I0127 07:54:20.009481 4787 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/1ad51913-b8b4-46f0-90d1-8334a8ce8ebe-kubelet-dir\") on node \"crc\" DevicePath \"\"" Jan 27 07:54:20 crc kubenswrapper[4787]: I0127 07:54:20.017090 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1ad51913-b8b4-46f0-90d1-8334a8ce8ebe-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "1ad51913-b8b4-46f0-90d1-8334a8ce8ebe" (UID: "1ad51913-b8b4-46f0-90d1-8334a8ce8ebe"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 27 07:54:20 crc kubenswrapper[4787]: I0127 07:54:20.110745 4787 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1ad51913-b8b4-46f0-90d1-8334a8ce8ebe-kube-api-access\") on node \"crc\" DevicePath \"\"" Jan 27 07:54:20 crc kubenswrapper[4787]: I0127 07:54:20.139934 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"1ad51913-b8b4-46f0-90d1-8334a8ce8ebe","Type":"ContainerDied","Data":"e04d4f313a2745357a590b2ccf3237015c1d625017bfc0adcfd3738e7cba346a"} Jan 27 07:54:20 crc kubenswrapper[4787]: I0127 07:54:20.139991 4787 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e04d4f313a2745357a590b2ccf3237015c1d625017bfc0adcfd3738e7cba346a" Jan 27 07:54:20 crc kubenswrapper[4787]: I0127 07:54:20.140000 4787 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Jan 27 07:54:20 crc kubenswrapper[4787]: I0127 07:54:20.717660 4787 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-ingress/router-default-5444994796-jzdds" Jan 27 07:54:20 crc kubenswrapper[4787]: I0127 07:54:20.734925 4787 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ingress/router-default-5444994796-jzdds" Jan 27 07:54:21 crc kubenswrapper[4787]: I0127 07:54:21.736248 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/3969f21f-ab36-49b4-9a9c-02cf19e65ad0-metrics-certs\") pod \"network-metrics-daemon-vws75\" (UID: \"3969f21f-ab36-49b4-9a9c-02cf19e65ad0\") " pod="openshift-multus/network-metrics-daemon-vws75" Jan 27 07:54:21 crc kubenswrapper[4787]: I0127 07:54:21.743579 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/3969f21f-ab36-49b4-9a9c-02cf19e65ad0-metrics-certs\") pod \"network-metrics-daemon-vws75\" (UID: \"3969f21f-ab36-49b4-9a9c-02cf19e65ad0\") " pod="openshift-multus/network-metrics-daemon-vws75" Jan 27 07:54:21 crc kubenswrapper[4787]: I0127 07:54:21.794288 4787 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-vws75" Jan 27 07:54:22 crc kubenswrapper[4787]: I0127 07:54:22.414411 4787 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-g44f6"] Jan 27 07:54:22 crc kubenswrapper[4787]: I0127 07:54:22.414689 4787 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-879f6c89f-g44f6" podUID="ca4c081d-896c-4cc8-9656-59364376de35" containerName="controller-manager" containerID="cri-o://18d9ef1e7d3e588a5d939868f7c0a651f90760ac04dbbd15cc83a8ed490076a9" gracePeriod=30 Jan 27 07:54:22 crc kubenswrapper[4787]: I0127 07:54:22.435081 4787 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-dlz8t"] Jan 27 07:54:22 crc kubenswrapper[4787]: I0127 07:54:22.435356 4787 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-dlz8t" podUID="0259775f-1fef-486a-bc17-4638e38ed83f" containerName="route-controller-manager" containerID="cri-o://2b47bd63962bfe1d1f2626a0146931ca7d71c50c1e9a2b870fbb6f7f3fafcb6f" gracePeriod=30 Jan 27 07:54:22 crc kubenswrapper[4787]: I0127 07:54:22.823311 4787 patch_prober.go:28] interesting pod/machine-config-daemon-q4fh5 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 27 07:54:22 crc kubenswrapper[4787]: I0127 07:54:22.823406 4787 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-q4fh5" podUID="f051e184-acac-47cf-9e04-9df648288715" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 27 07:54:23 crc kubenswrapper[4787]: I0127 07:54:23.160978 4787 generic.go:334] "Generic (PLEG): container finished" podID="0259775f-1fef-486a-bc17-4638e38ed83f" containerID="2b47bd63962bfe1d1f2626a0146931ca7d71c50c1e9a2b870fbb6f7f3fafcb6f" exitCode=0 Jan 27 07:54:23 crc kubenswrapper[4787]: I0127 07:54:23.161100 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-dlz8t" event={"ID":"0259775f-1fef-486a-bc17-4638e38ed83f","Type":"ContainerDied","Data":"2b47bd63962bfe1d1f2626a0146931ca7d71c50c1e9a2b870fbb6f7f3fafcb6f"} Jan 27 07:54:23 crc kubenswrapper[4787]: I0127 07:54:23.163478 4787 generic.go:334] "Generic (PLEG): container finished" podID="ca4c081d-896c-4cc8-9656-59364376de35" containerID="18d9ef1e7d3e588a5d939868f7c0a651f90760ac04dbbd15cc83a8ed490076a9" exitCode=0 Jan 27 07:54:23 crc kubenswrapper[4787]: I0127 07:54:23.163526 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-g44f6" event={"ID":"ca4c081d-896c-4cc8-9656-59364376de35","Type":"ContainerDied","Data":"18d9ef1e7d3e588a5d939868f7c0a651f90760ac04dbbd15cc83a8ed490076a9"} Jan 27 07:54:26 crc kubenswrapper[4787]: I0127 07:54:26.699292 4787 patch_prober.go:28] interesting pod/route-controller-manager-6576b87f9c-dlz8t container/route-controller-manager namespace/openshift-route-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.9:8443/healthz\": dial tcp 10.217.0.9:8443: 
connect: connection refused" start-of-body= Jan 27 07:54:26 crc kubenswrapper[4787]: I0127 07:54:26.699848 4787 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-dlz8t" podUID="0259775f-1fef-486a-bc17-4638e38ed83f" containerName="route-controller-manager" probeResult="failure" output="Get \"https://10.217.0.9:8443/healthz\": dial tcp 10.217.0.9:8443: connect: connection refused" Jan 27 07:54:26 crc kubenswrapper[4787]: I0127 07:54:26.745741 4787 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-f9d7485db-qptnb" Jan 27 07:54:26 crc kubenswrapper[4787]: I0127 07:54:26.750349 4787 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-f9d7485db-qptnb" Jan 27 07:54:26 crc kubenswrapper[4787]: I0127 07:54:26.816340 4787 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/downloads-7954f5f757-xwx4w" Jan 27 07:54:26 crc kubenswrapper[4787]: I0127 07:54:26.869626 4787 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-697d97f7c8-d44bj" Jan 27 07:54:26 crc kubenswrapper[4787]: I0127 07:54:26.928924 4787 patch_prober.go:28] interesting pod/controller-manager-879f6c89f-g44f6 container/controller-manager namespace/openshift-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.13:8443/healthz\": dial tcp 10.217.0.13:8443: connect: connection refused" start-of-body= Jan 27 07:54:26 crc kubenswrapper[4787]: I0127 07:54:26.929007 4787 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-controller-manager/controller-manager-879f6c89f-g44f6" podUID="ca4c081d-896c-4cc8-9656-59364376de35" containerName="controller-manager" probeResult="failure" output="Get \"https://10.217.0.13:8443/healthz\": dial tcp 10.217.0.13:8443: connect: connection refused" Jan 27 07:54:32 crc kubenswrapper[4787]: E0127 07:54:32.963714 4787 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-marketplace-index:v4.18" Jan 27 07:54:32 crc kubenswrapper[4787]: E0127 07:54:32.964999 4787 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-marketplace-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-nkvcd,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-marketplace-ht6h9_openshift-marketplace(624e9a13-c9c5-4ef3-8628-056bfc65338b): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Jan 27 07:54:32 crc kubenswrapper[4787]: E0127 07:54:32.966226 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-marketplace-ht6h9" podUID="624e9a13-c9c5-4ef3-8628-056bfc65338b" Jan 27 07:54:37 crc kubenswrapper[4787]: I0127 07:54:37.352181 4787 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-4xj5z" Jan 27 07:54:37 crc kubenswrapper[4787]: E0127 07:54:37.571920 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-marketplace-ht6h9" podUID="624e9a13-c9c5-4ef3-8628-056bfc65338b" Jan 27 07:54:37 crc kubenswrapper[4787]: I0127 07:54:37.622669 4787 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-dlz8t" Jan 27 07:54:37 crc kubenswrapper[4787]: I0127 07:54:37.629363 4787 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-g44f6" Jan 27 07:54:37 crc kubenswrapper[4787]: I0127 07:54:37.674426 4787 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-cff864fb9-zqlsm"] Jan 27 07:54:37 crc kubenswrapper[4787]: E0127 07:54:37.674903 4787 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1ad51913-b8b4-46f0-90d1-8334a8ce8ebe" containerName="pruner" Jan 27 07:54:37 crc kubenswrapper[4787]: I0127 07:54:37.674932 4787 state_mem.go:107] "Deleted CPUSet assignment" podUID="1ad51913-b8b4-46f0-90d1-8334a8ce8ebe" containerName="pruner" Jan 27 07:54:37 crc kubenswrapper[4787]: E0127 07:54:37.674950 4787 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ca4c081d-896c-4cc8-9656-59364376de35" containerName="controller-manager" Jan 27 07:54:37 crc kubenswrapper[4787]: I0127 07:54:37.674958 4787 state_mem.go:107] "Deleted CPUSet assignment" podUID="ca4c081d-896c-4cc8-9656-59364376de35" containerName="controller-manager" Jan 27 07:54:37 crc kubenswrapper[4787]: E0127 07:54:37.674977 4787 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0259775f-1fef-486a-bc17-4638e38ed83f" containerName="route-controller-manager" Jan 27 07:54:37 crc kubenswrapper[4787]: I0127 07:54:37.674986 4787 state_mem.go:107] "Deleted CPUSet assignment" podUID="0259775f-1fef-486a-bc17-4638e38ed83f" containerName="route-controller-manager" Jan 27 07:54:37 crc kubenswrapper[4787]: I0127 07:54:37.675091 4787 memory_manager.go:354] "RemoveStaleState removing state" podUID="0259775f-1fef-486a-bc17-4638e38ed83f" containerName="route-controller-manager" Jan 27 07:54:37 crc kubenswrapper[4787]: I0127 07:54:37.675101 4787 memory_manager.go:354] "RemoveStaleState removing state" podUID="1ad51913-b8b4-46f0-90d1-8334a8ce8ebe" containerName="pruner" Jan 27 07:54:37 crc kubenswrapper[4787]: I0127 07:54:37.675113 4787 memory_manager.go:354] "RemoveStaleState removing state" podUID="ca4c081d-896c-4cc8-9656-59364376de35" containerName="controller-manager" Jan 27 07:54:37 crc kubenswrapper[4787]: I0127 07:54:37.675723 4787 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-cff864fb9-zqlsm" Jan 27 07:54:37 crc kubenswrapper[4787]: I0127 07:54:37.677751 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-cff864fb9-zqlsm"] Jan 27 07:54:37 crc kubenswrapper[4787]: I0127 07:54:37.699967 4787 patch_prober.go:28] interesting pod/route-controller-manager-6576b87f9c-dlz8t container/route-controller-manager namespace/openshift-route-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.9:8443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body= Jan 27 07:54:37 crc kubenswrapper[4787]: I0127 07:54:37.700039 4787 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-dlz8t" podUID="0259775f-1fef-486a-bc17-4638e38ed83f" containerName="route-controller-manager" probeResult="failure" output="Get \"https://10.217.0.9:8443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Jan 27 07:54:37 crc kubenswrapper[4787]: I0127 07:54:37.717076 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-stktz\" (UniqueName: \"kubernetes.io/projected/0259775f-1fef-486a-bc17-4638e38ed83f-kube-api-access-stktz\") pod \"0259775f-1fef-486a-bc17-4638e38ed83f\" (UID: \"0259775f-1fef-486a-bc17-4638e38ed83f\") " Jan 27 07:54:37 crc kubenswrapper[4787]: I0127 07:54:37.717168 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ca4c081d-896c-4cc8-9656-59364376de35-config\") pod \"ca4c081d-896c-4cc8-9656-59364376de35\" (UID: \"ca4c081d-896c-4cc8-9656-59364376de35\") " Jan 27 07:54:37 crc kubenswrapper[4787]: I0127 07:54:37.717210 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/0259775f-1fef-486a-bc17-4638e38ed83f-client-ca\") pod \"0259775f-1fef-486a-bc17-4638e38ed83f\" (UID: \"0259775f-1fef-486a-bc17-4638e38ed83f\") " Jan 27 07:54:37 crc kubenswrapper[4787]: I0127 07:54:37.717258 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/ca4c081d-896c-4cc8-9656-59364376de35-client-ca\") pod \"ca4c081d-896c-4cc8-9656-59364376de35\" (UID: \"ca4c081d-896c-4cc8-9656-59364376de35\") " Jan 27 07:54:37 crc kubenswrapper[4787]: I0127 07:54:37.717288 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/ca4c081d-896c-4cc8-9656-59364376de35-proxy-ca-bundles\") pod \"ca4c081d-896c-4cc8-9656-59364376de35\" (UID: \"ca4c081d-896c-4cc8-9656-59364376de35\") " Jan 27 07:54:37 crc kubenswrapper[4787]: I0127 07:54:37.717356 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0259775f-1fef-486a-bc17-4638e38ed83f-serving-cert\") pod \"0259775f-1fef-486a-bc17-4638e38ed83f\" (UID: \"0259775f-1fef-486a-bc17-4638e38ed83f\") " Jan 27 07:54:37 crc kubenswrapper[4787]: I0127 07:54:37.717393 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ca4c081d-896c-4cc8-9656-59364376de35-serving-cert\") 
pod \"ca4c081d-896c-4cc8-9656-59364376de35\" (UID: \"ca4c081d-896c-4cc8-9656-59364376de35\") " Jan 27 07:54:37 crc kubenswrapper[4787]: I0127 07:54:37.717441 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0259775f-1fef-486a-bc17-4638e38ed83f-config\") pod \"0259775f-1fef-486a-bc17-4638e38ed83f\" (UID: \"0259775f-1fef-486a-bc17-4638e38ed83f\") " Jan 27 07:54:37 crc kubenswrapper[4787]: I0127 07:54:37.717493 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kxmsq\" (UniqueName: \"kubernetes.io/projected/ca4c081d-896c-4cc8-9656-59364376de35-kube-api-access-kxmsq\") pod \"ca4c081d-896c-4cc8-9656-59364376de35\" (UID: \"ca4c081d-896c-4cc8-9656-59364376de35\") " Jan 27 07:54:37 crc kubenswrapper[4787]: I0127 07:54:37.720049 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ca4c081d-896c-4cc8-9656-59364376de35-config" (OuterVolumeSpecName: "config") pod "ca4c081d-896c-4cc8-9656-59364376de35" (UID: "ca4c081d-896c-4cc8-9656-59364376de35"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 27 07:54:37 crc kubenswrapper[4787]: I0127 07:54:37.720232 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0259775f-1fef-486a-bc17-4638e38ed83f-client-ca" (OuterVolumeSpecName: "client-ca") pod "0259775f-1fef-486a-bc17-4638e38ed83f" (UID: "0259775f-1fef-486a-bc17-4638e38ed83f"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 27 07:54:37 crc kubenswrapper[4787]: I0127 07:54:37.720367 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0259775f-1fef-486a-bc17-4638e38ed83f-config" (OuterVolumeSpecName: "config") pod "0259775f-1fef-486a-bc17-4638e38ed83f" (UID: "0259775f-1fef-486a-bc17-4638e38ed83f"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 27 07:54:37 crc kubenswrapper[4787]: I0127 07:54:37.722399 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ca4c081d-896c-4cc8-9656-59364376de35-client-ca" (OuterVolumeSpecName: "client-ca") pod "ca4c081d-896c-4cc8-9656-59364376de35" (UID: "ca4c081d-896c-4cc8-9656-59364376de35"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 27 07:54:37 crc kubenswrapper[4787]: I0127 07:54:37.723541 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ca4c081d-896c-4cc8-9656-59364376de35-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "ca4c081d-896c-4cc8-9656-59364376de35" (UID: "ca4c081d-896c-4cc8-9656-59364376de35"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 27 07:54:37 crc kubenswrapper[4787]: I0127 07:54:37.726727 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ca4c081d-896c-4cc8-9656-59364376de35-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "ca4c081d-896c-4cc8-9656-59364376de35" (UID: "ca4c081d-896c-4cc8-9656-59364376de35"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 27 07:54:37 crc kubenswrapper[4787]: I0127 07:54:37.728318 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ca4c081d-896c-4cc8-9656-59364376de35-kube-api-access-kxmsq" (OuterVolumeSpecName: "kube-api-access-kxmsq") pod "ca4c081d-896c-4cc8-9656-59364376de35" (UID: "ca4c081d-896c-4cc8-9656-59364376de35"). InnerVolumeSpecName "kube-api-access-kxmsq". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 27 07:54:37 crc kubenswrapper[4787]: I0127 07:54:37.728585 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0259775f-1fef-486a-bc17-4638e38ed83f-kube-api-access-stktz" (OuterVolumeSpecName: "kube-api-access-stktz") pod "0259775f-1fef-486a-bc17-4638e38ed83f" (UID: "0259775f-1fef-486a-bc17-4638e38ed83f"). InnerVolumeSpecName "kube-api-access-stktz". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 27 07:54:37 crc kubenswrapper[4787]: I0127 07:54:37.728617 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0259775f-1fef-486a-bc17-4638e38ed83f-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "0259775f-1fef-486a-bc17-4638e38ed83f" (UID: "0259775f-1fef-486a-bc17-4638e38ed83f"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 27 07:54:37 crc kubenswrapper[4787]: I0127 07:54:37.819168 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1696f0ba-1f0c-4f1a-8510-bcdaa31fcb1c-config\") pod \"route-controller-manager-cff864fb9-zqlsm\" (UID: \"1696f0ba-1f0c-4f1a-8510-bcdaa31fcb1c\") " pod="openshift-route-controller-manager/route-controller-manager-cff864fb9-zqlsm" Jan 27 07:54:37 crc kubenswrapper[4787]: I0127 07:54:37.819274 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/1696f0ba-1f0c-4f1a-8510-bcdaa31fcb1c-client-ca\") pod \"route-controller-manager-cff864fb9-zqlsm\" (UID: \"1696f0ba-1f0c-4f1a-8510-bcdaa31fcb1c\") " pod="openshift-route-controller-manager/route-controller-manager-cff864fb9-zqlsm" Jan 27 07:54:37 crc kubenswrapper[4787]: I0127 07:54:37.819316 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1696f0ba-1f0c-4f1a-8510-bcdaa31fcb1c-serving-cert\") pod \"route-controller-manager-cff864fb9-zqlsm\" (UID: \"1696f0ba-1f0c-4f1a-8510-bcdaa31fcb1c\") " pod="openshift-route-controller-manager/route-controller-manager-cff864fb9-zqlsm" Jan 27 07:54:37 crc kubenswrapper[4787]: I0127 07:54:37.819361 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vlktw\" (UniqueName: \"kubernetes.io/projected/1696f0ba-1f0c-4f1a-8510-bcdaa31fcb1c-kube-api-access-vlktw\") pod \"route-controller-manager-cff864fb9-zqlsm\" (UID: \"1696f0ba-1f0c-4f1a-8510-bcdaa31fcb1c\") " pod="openshift-route-controller-manager/route-controller-manager-cff864fb9-zqlsm" Jan 27 07:54:37 crc kubenswrapper[4787]: I0127 07:54:37.819415 4787 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/0259775f-1fef-486a-bc17-4638e38ed83f-client-ca\") on node \"crc\" DevicePath \"\"" Jan 27 07:54:37 crc kubenswrapper[4787]: I0127 07:54:37.819428 4787 
reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/ca4c081d-896c-4cc8-9656-59364376de35-client-ca\") on node \"crc\" DevicePath \"\"" Jan 27 07:54:37 crc kubenswrapper[4787]: I0127 07:54:37.819441 4787 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/ca4c081d-896c-4cc8-9656-59364376de35-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Jan 27 07:54:37 crc kubenswrapper[4787]: I0127 07:54:37.819454 4787 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0259775f-1fef-486a-bc17-4638e38ed83f-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 27 07:54:37 crc kubenswrapper[4787]: I0127 07:54:37.819464 4787 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ca4c081d-896c-4cc8-9656-59364376de35-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 27 07:54:37 crc kubenswrapper[4787]: I0127 07:54:37.819475 4787 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0259775f-1fef-486a-bc17-4638e38ed83f-config\") on node \"crc\" DevicePath \"\"" Jan 27 07:54:37 crc kubenswrapper[4787]: I0127 07:54:37.819484 4787 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kxmsq\" (UniqueName: \"kubernetes.io/projected/ca4c081d-896c-4cc8-9656-59364376de35-kube-api-access-kxmsq\") on node \"crc\" DevicePath \"\"" Jan 27 07:54:37 crc kubenswrapper[4787]: I0127 07:54:37.819497 4787 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-stktz\" (UniqueName: \"kubernetes.io/projected/0259775f-1fef-486a-bc17-4638e38ed83f-kube-api-access-stktz\") on node \"crc\" DevicePath \"\"" Jan 27 07:54:37 crc kubenswrapper[4787]: I0127 07:54:37.819508 4787 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ca4c081d-896c-4cc8-9656-59364376de35-config\") on node \"crc\" DevicePath \"\"" Jan 27 07:54:37 crc kubenswrapper[4787]: I0127 07:54:37.920836 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vlktw\" (UniqueName: \"kubernetes.io/projected/1696f0ba-1f0c-4f1a-8510-bcdaa31fcb1c-kube-api-access-vlktw\") pod \"route-controller-manager-cff864fb9-zqlsm\" (UID: \"1696f0ba-1f0c-4f1a-8510-bcdaa31fcb1c\") " pod="openshift-route-controller-manager/route-controller-manager-cff864fb9-zqlsm" Jan 27 07:54:37 crc kubenswrapper[4787]: I0127 07:54:37.920920 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1696f0ba-1f0c-4f1a-8510-bcdaa31fcb1c-config\") pod \"route-controller-manager-cff864fb9-zqlsm\" (UID: \"1696f0ba-1f0c-4f1a-8510-bcdaa31fcb1c\") " pod="openshift-route-controller-manager/route-controller-manager-cff864fb9-zqlsm" Jan 27 07:54:37 crc kubenswrapper[4787]: I0127 07:54:37.921015 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/1696f0ba-1f0c-4f1a-8510-bcdaa31fcb1c-client-ca\") pod \"route-controller-manager-cff864fb9-zqlsm\" (UID: \"1696f0ba-1f0c-4f1a-8510-bcdaa31fcb1c\") " pod="openshift-route-controller-manager/route-controller-manager-cff864fb9-zqlsm" Jan 27 07:54:37 crc kubenswrapper[4787]: I0127 07:54:37.921064 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/1696f0ba-1f0c-4f1a-8510-bcdaa31fcb1c-serving-cert\") pod \"route-controller-manager-cff864fb9-zqlsm\" (UID: \"1696f0ba-1f0c-4f1a-8510-bcdaa31fcb1c\") " pod="openshift-route-controller-manager/route-controller-manager-cff864fb9-zqlsm" Jan 27 07:54:37 crc kubenswrapper[4787]: I0127 07:54:37.923129 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1696f0ba-1f0c-4f1a-8510-bcdaa31fcb1c-config\") pod \"route-controller-manager-cff864fb9-zqlsm\" (UID: \"1696f0ba-1f0c-4f1a-8510-bcdaa31fcb1c\") " pod="openshift-route-controller-manager/route-controller-manager-cff864fb9-zqlsm" Jan 27 07:54:37 crc kubenswrapper[4787]: I0127 07:54:37.923258 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/1696f0ba-1f0c-4f1a-8510-bcdaa31fcb1c-client-ca\") pod \"route-controller-manager-cff864fb9-zqlsm\" (UID: \"1696f0ba-1f0c-4f1a-8510-bcdaa31fcb1c\") " pod="openshift-route-controller-manager/route-controller-manager-cff864fb9-zqlsm" Jan 27 07:54:37 crc kubenswrapper[4787]: I0127 07:54:37.928744 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1696f0ba-1f0c-4f1a-8510-bcdaa31fcb1c-serving-cert\") pod \"route-controller-manager-cff864fb9-zqlsm\" (UID: \"1696f0ba-1f0c-4f1a-8510-bcdaa31fcb1c\") " pod="openshift-route-controller-manager/route-controller-manager-cff864fb9-zqlsm" Jan 27 07:54:37 crc kubenswrapper[4787]: I0127 07:54:37.928839 4787 patch_prober.go:28] interesting pod/controller-manager-879f6c89f-g44f6 container/controller-manager namespace/openshift-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.13:8443/healthz\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" start-of-body= Jan 27 07:54:37 crc kubenswrapper[4787]: I0127 07:54:37.928890 4787 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-controller-manager/controller-manager-879f6c89f-g44f6" podUID="ca4c081d-896c-4cc8-9656-59364376de35" containerName="controller-manager" probeResult="failure" output="Get \"https://10.217.0.13:8443/healthz\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Jan 27 07:54:37 crc kubenswrapper[4787]: I0127 07:54:37.941398 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vlktw\" (UniqueName: \"kubernetes.io/projected/1696f0ba-1f0c-4f1a-8510-bcdaa31fcb1c-kube-api-access-vlktw\") pod \"route-controller-manager-cff864fb9-zqlsm\" (UID: \"1696f0ba-1f0c-4f1a-8510-bcdaa31fcb1c\") " pod="openshift-route-controller-manager/route-controller-manager-cff864fb9-zqlsm" Jan 27 07:54:38 crc kubenswrapper[4787]: I0127 07:54:38.008449 4787 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-cff864fb9-zqlsm" Jan 27 07:54:38 crc kubenswrapper[4787]: I0127 07:54:38.260729 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-g44f6" event={"ID":"ca4c081d-896c-4cc8-9656-59364376de35","Type":"ContainerDied","Data":"7128b0f800ba7ef260edfd0bbe8553abf59eec44713d7f401d1f64afeed28b63"} Jan 27 07:54:38 crc kubenswrapper[4787]: I0127 07:54:38.260825 4787 scope.go:117] "RemoveContainer" containerID="18d9ef1e7d3e588a5d939868f7c0a651f90760ac04dbbd15cc83a8ed490076a9" Jan 27 07:54:38 crc kubenswrapper[4787]: I0127 07:54:38.260837 4787 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-g44f6" Jan 27 07:54:38 crc kubenswrapper[4787]: I0127 07:54:38.267744 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-dlz8t" event={"ID":"0259775f-1fef-486a-bc17-4638e38ed83f","Type":"ContainerDied","Data":"41afcd3d4f1c7a6dc4b744f46a303d0903dfaf2cc98bd0b97df786bbe6ad0ce1"} Jan 27 07:54:38 crc kubenswrapper[4787]: I0127 07:54:38.267884 4787 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-dlz8t" Jan 27 07:54:38 crc kubenswrapper[4787]: I0127 07:54:38.303537 4787 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-g44f6"] Jan 27 07:54:38 crc kubenswrapper[4787]: I0127 07:54:38.308721 4787 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-g44f6"] Jan 27 07:54:38 crc kubenswrapper[4787]: I0127 07:54:38.311538 4787 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-dlz8t"] Jan 27 07:54:38 crc kubenswrapper[4787]: I0127 07:54:38.313960 4787 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-dlz8t"] Jan 27 07:54:39 crc kubenswrapper[4787]: I0127 07:54:39.085189 4787 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0259775f-1fef-486a-bc17-4638e38ed83f" path="/var/lib/kubelet/pods/0259775f-1fef-486a-bc17-4638e38ed83f/volumes" Jan 27 07:54:39 crc kubenswrapper[4787]: I0127 07:54:39.085860 4787 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ca4c081d-896c-4cc8-9656-59364376de35" path="/var/lib/kubelet/pods/ca4c081d-896c-4cc8-9656-59364376de35/volumes" Jan 27 07:54:39 crc kubenswrapper[4787]: E0127 07:54:39.399407 4787 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/community-operator-index:v4.18" Jan 27 07:54:39 crc kubenswrapper[4787]: E0127 07:54:39.399650 4787 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/community-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-z5r7l,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod community-operators-cgwgp_openshift-marketplace(0368ea79-94a8-42e3-8986-dddbec83d755): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Jan 27 07:54:39 crc kubenswrapper[4787]: E0127 07:54:39.402764 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/community-operators-cgwgp" podUID="0368ea79-94a8-42e3-8986-dddbec83d755" Jan 27 07:54:39 crc kubenswrapper[4787]: E0127 07:54:39.402992 4787 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/community-operator-index:v4.18" Jan 27 07:54:39 crc kubenswrapper[4787]: E0127 07:54:39.403223 4787 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/community-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-5tmpp,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod community-operators-tnxzl_openshift-marketplace(44cc5af2-0636-4589-a988-e7e32bfea075): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Jan 27 07:54:39 crc kubenswrapper[4787]: E0127 07:54:39.404413 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/community-operators-tnxzl" podUID="44cc5af2-0636-4589-a988-e7e32bfea075" Jan 27 07:54:39 crc kubenswrapper[4787]: I0127 07:54:39.803415 4787 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-7666b944cf-w5xrh"] Jan 27 07:54:39 crc kubenswrapper[4787]: I0127 07:54:39.804541 4787 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-7666b944cf-w5xrh" Jan 27 07:54:39 crc kubenswrapper[4787]: I0127 07:54:39.806631 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Jan 27 07:54:39 crc kubenswrapper[4787]: I0127 07:54:39.806852 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Jan 27 07:54:39 crc kubenswrapper[4787]: I0127 07:54:39.807184 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Jan 27 07:54:39 crc kubenswrapper[4787]: I0127 07:54:39.807336 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Jan 27 07:54:39 crc kubenswrapper[4787]: I0127 07:54:39.807673 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Jan 27 07:54:39 crc kubenswrapper[4787]: I0127 07:54:39.807749 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Jan 27 07:54:39 crc kubenswrapper[4787]: I0127 07:54:39.815512 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-7666b944cf-w5xrh"] Jan 27 07:54:39 crc kubenswrapper[4787]: I0127 07:54:39.815535 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Jan 27 07:54:39 crc kubenswrapper[4787]: I0127 07:54:39.849041 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/9af0e159-7fb4-47fe-8d5a-fd19fee5a744-client-ca\") pod \"controller-manager-7666b944cf-w5xrh\" (UID: \"9af0e159-7fb4-47fe-8d5a-fd19fee5a744\") " pod="openshift-controller-manager/controller-manager-7666b944cf-w5xrh" Jan 27 07:54:39 crc kubenswrapper[4787]: I0127 07:54:39.849089 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5c5m4\" (UniqueName: \"kubernetes.io/projected/9af0e159-7fb4-47fe-8d5a-fd19fee5a744-kube-api-access-5c5m4\") pod \"controller-manager-7666b944cf-w5xrh\" (UID: \"9af0e159-7fb4-47fe-8d5a-fd19fee5a744\") " pod="openshift-controller-manager/controller-manager-7666b944cf-w5xrh" Jan 27 07:54:39 crc kubenswrapper[4787]: I0127 07:54:39.849152 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9af0e159-7fb4-47fe-8d5a-fd19fee5a744-config\") pod \"controller-manager-7666b944cf-w5xrh\" (UID: \"9af0e159-7fb4-47fe-8d5a-fd19fee5a744\") " pod="openshift-controller-manager/controller-manager-7666b944cf-w5xrh" Jan 27 07:54:39 crc kubenswrapper[4787]: I0127 07:54:39.849206 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9af0e159-7fb4-47fe-8d5a-fd19fee5a744-serving-cert\") pod \"controller-manager-7666b944cf-w5xrh\" (UID: \"9af0e159-7fb4-47fe-8d5a-fd19fee5a744\") " pod="openshift-controller-manager/controller-manager-7666b944cf-w5xrh" Jan 27 07:54:39 crc kubenswrapper[4787]: I0127 07:54:39.849235 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: 
\"kubernetes.io/configmap/9af0e159-7fb4-47fe-8d5a-fd19fee5a744-proxy-ca-bundles\") pod \"controller-manager-7666b944cf-w5xrh\" (UID: \"9af0e159-7fb4-47fe-8d5a-fd19fee5a744\") " pod="openshift-controller-manager/controller-manager-7666b944cf-w5xrh" Jan 27 07:54:39 crc kubenswrapper[4787]: I0127 07:54:39.950892 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/9af0e159-7fb4-47fe-8d5a-fd19fee5a744-client-ca\") pod \"controller-manager-7666b944cf-w5xrh\" (UID: \"9af0e159-7fb4-47fe-8d5a-fd19fee5a744\") " pod="openshift-controller-manager/controller-manager-7666b944cf-w5xrh" Jan 27 07:54:39 crc kubenswrapper[4787]: I0127 07:54:39.950959 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5c5m4\" (UniqueName: \"kubernetes.io/projected/9af0e159-7fb4-47fe-8d5a-fd19fee5a744-kube-api-access-5c5m4\") pod \"controller-manager-7666b944cf-w5xrh\" (UID: \"9af0e159-7fb4-47fe-8d5a-fd19fee5a744\") " pod="openshift-controller-manager/controller-manager-7666b944cf-w5xrh" Jan 27 07:54:39 crc kubenswrapper[4787]: I0127 07:54:39.951060 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9af0e159-7fb4-47fe-8d5a-fd19fee5a744-config\") pod \"controller-manager-7666b944cf-w5xrh\" (UID: \"9af0e159-7fb4-47fe-8d5a-fd19fee5a744\") " pod="openshift-controller-manager/controller-manager-7666b944cf-w5xrh" Jan 27 07:54:39 crc kubenswrapper[4787]: I0127 07:54:39.951145 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9af0e159-7fb4-47fe-8d5a-fd19fee5a744-serving-cert\") pod \"controller-manager-7666b944cf-w5xrh\" (UID: \"9af0e159-7fb4-47fe-8d5a-fd19fee5a744\") " pod="openshift-controller-manager/controller-manager-7666b944cf-w5xrh" Jan 27 07:54:39 crc kubenswrapper[4787]: I0127 07:54:39.951174 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/9af0e159-7fb4-47fe-8d5a-fd19fee5a744-proxy-ca-bundles\") pod \"controller-manager-7666b944cf-w5xrh\" (UID: \"9af0e159-7fb4-47fe-8d5a-fd19fee5a744\") " pod="openshift-controller-manager/controller-manager-7666b944cf-w5xrh" Jan 27 07:54:39 crc kubenswrapper[4787]: I0127 07:54:39.952123 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/9af0e159-7fb4-47fe-8d5a-fd19fee5a744-client-ca\") pod \"controller-manager-7666b944cf-w5xrh\" (UID: \"9af0e159-7fb4-47fe-8d5a-fd19fee5a744\") " pod="openshift-controller-manager/controller-manager-7666b944cf-w5xrh" Jan 27 07:54:39 crc kubenswrapper[4787]: I0127 07:54:39.952438 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/9af0e159-7fb4-47fe-8d5a-fd19fee5a744-proxy-ca-bundles\") pod \"controller-manager-7666b944cf-w5xrh\" (UID: \"9af0e159-7fb4-47fe-8d5a-fd19fee5a744\") " pod="openshift-controller-manager/controller-manager-7666b944cf-w5xrh" Jan 27 07:54:39 crc kubenswrapper[4787]: I0127 07:54:39.953878 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9af0e159-7fb4-47fe-8d5a-fd19fee5a744-config\") pod \"controller-manager-7666b944cf-w5xrh\" (UID: \"9af0e159-7fb4-47fe-8d5a-fd19fee5a744\") " 
pod="openshift-controller-manager/controller-manager-7666b944cf-w5xrh" Jan 27 07:54:39 crc kubenswrapper[4787]: I0127 07:54:39.962659 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9af0e159-7fb4-47fe-8d5a-fd19fee5a744-serving-cert\") pod \"controller-manager-7666b944cf-w5xrh\" (UID: \"9af0e159-7fb4-47fe-8d5a-fd19fee5a744\") " pod="openshift-controller-manager/controller-manager-7666b944cf-w5xrh" Jan 27 07:54:39 crc kubenswrapper[4787]: I0127 07:54:39.967880 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5c5m4\" (UniqueName: \"kubernetes.io/projected/9af0e159-7fb4-47fe-8d5a-fd19fee5a744-kube-api-access-5c5m4\") pod \"controller-manager-7666b944cf-w5xrh\" (UID: \"9af0e159-7fb4-47fe-8d5a-fd19fee5a744\") " pod="openshift-controller-manager/controller-manager-7666b944cf-w5xrh" Jan 27 07:54:40 crc kubenswrapper[4787]: I0127 07:54:40.144325 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-7666b944cf-w5xrh" Jan 27 07:54:42 crc kubenswrapper[4787]: I0127 07:54:42.405632 4787 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-7666b944cf-w5xrh"] Jan 27 07:54:42 crc kubenswrapper[4787]: I0127 07:54:42.506301 4787 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-cff864fb9-zqlsm"] Jan 27 07:54:43 crc kubenswrapper[4787]: E0127 07:54:43.081999 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"\"" pod="openshift-marketplace/community-operators-cgwgp" podUID="0368ea79-94a8-42e3-8986-dddbec83d755" Jan 27 07:54:43 crc kubenswrapper[4787]: E0127 07:54:43.082062 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"\"" pod="openshift-marketplace/community-operators-tnxzl" podUID="44cc5af2-0636-4589-a988-e7e32bfea075" Jan 27 07:54:43 crc kubenswrapper[4787]: I0127 07:54:43.103267 4787 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 27 07:54:43 crc kubenswrapper[4787]: E0127 07:54:43.109281 4787 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-operator-index:v4.18" Jan 27 07:54:43 crc kubenswrapper[4787]: E0127 07:54:43.109879 4787 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-76lt5,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-operators-kkjnl_openshift-marketplace(1e42281e-20ed-4105-866f-878ffbf6c6eb): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Jan 27 07:54:43 crc kubenswrapper[4787]: E0127 07:54:43.111075 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-operators-kkjnl" podUID="1e42281e-20ed-4105-866f-878ffbf6c6eb" Jan 27 07:54:43 crc kubenswrapper[4787]: I0127 07:54:43.280972 4787 scope.go:117] "RemoveContainer" containerID="2b47bd63962bfe1d1f2626a0146931ca7d71c50c1e9a2b870fbb6f7f3fafcb6f" Jan 27 07:54:43 crc kubenswrapper[4787]: E0127 07:54:43.322300 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-operators-kkjnl" podUID="1e42281e-20ed-4105-866f-878ffbf6c6eb" Jan 27 07:54:43 crc kubenswrapper[4787]: I0127 07:54:43.404359 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-vws75"] Jan 27 07:54:43 crc kubenswrapper[4787]: I0127 07:54:43.569312 4787 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-7666b944cf-w5xrh"] Jan 27 07:54:43 crc kubenswrapper[4787]: I0127 07:54:43.733058 4787 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-cff864fb9-zqlsm"] Jan 27 07:54:44 crc kubenswrapper[4787]: I0127 07:54:44.317809 4787 generic.go:334] "Generic (PLEG): container finished" podID="2a1ecf2a-6b91-48d7-873e-918dd4e045fc" containerID="e44397098e422e87e1da7b248b23d5f52f1cfcc38a65342a95ecf9164e6322e7" exitCode=0 Jan 27 07:54:44 crc kubenswrapper[4787]: I0127 07:54:44.317882 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-672cg" 
event={"ID":"2a1ecf2a-6b91-48d7-873e-918dd4e045fc","Type":"ContainerDied","Data":"e44397098e422e87e1da7b248b23d5f52f1cfcc38a65342a95ecf9164e6322e7"} Jan 27 07:54:44 crc kubenswrapper[4787]: I0127 07:54:44.321162 4787 generic.go:334] "Generic (PLEG): container finished" podID="7af45749-cd7f-490a-b2f6-bf979ea6467e" containerID="629cf5f25077b459468d115da73f8db540be3990ca6244c0ac21ff90a31aed2d" exitCode=0 Jan 27 07:54:44 crc kubenswrapper[4787]: I0127 07:54:44.321254 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-npc57" event={"ID":"7af45749-cd7f-490a-b2f6-bf979ea6467e","Type":"ContainerDied","Data":"629cf5f25077b459468d115da73f8db540be3990ca6244c0ac21ff90a31aed2d"} Jan 27 07:54:44 crc kubenswrapper[4787]: I0127 07:54:44.324593 4787 generic.go:334] "Generic (PLEG): container finished" podID="264daa3b-f5d9-407d-8835-35845525329a" containerID="1b10219e013f243bd9fe375bd99faab099e08f41d538f33974e456fa1220d7b7" exitCode=0 Jan 27 07:54:44 crc kubenswrapper[4787]: I0127 07:54:44.324706 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-hz9l2" event={"ID":"264daa3b-f5d9-407d-8835-35845525329a","Type":"ContainerDied","Data":"1b10219e013f243bd9fe375bd99faab099e08f41d538f33974e456fa1220d7b7"} Jan 27 07:54:44 crc kubenswrapper[4787]: I0127 07:54:44.327484 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-qjd6t" event={"ID":"5f5e46e1-1252-4a0a-ad11-8d4236f3759b","Type":"ContainerStarted","Data":"1309a0eb542609f82174c456dbe308ca94a8f5093a7b41ff79e5822733bbd396"} Jan 27 07:54:44 crc kubenswrapper[4787]: I0127 07:54:44.329571 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-7666b944cf-w5xrh" event={"ID":"9af0e159-7fb4-47fe-8d5a-fd19fee5a744","Type":"ContainerStarted","Data":"e3ab5135381411334039023b381683fb63d36e5ab4b56c6f4ac030bf964a1736"} Jan 27 07:54:44 crc kubenswrapper[4787]: I0127 07:54:44.329645 4787 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-7666b944cf-w5xrh" Jan 27 07:54:44 crc kubenswrapper[4787]: I0127 07:54:44.329662 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-7666b944cf-w5xrh" event={"ID":"9af0e159-7fb4-47fe-8d5a-fd19fee5a744","Type":"ContainerStarted","Data":"884316bdef089f92bdbee924916583c28a60a32993cc5267435c350c9bb28052"} Jan 27 07:54:44 crc kubenswrapper[4787]: I0127 07:54:44.329975 4787 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-7666b944cf-w5xrh" podUID="9af0e159-7fb4-47fe-8d5a-fd19fee5a744" containerName="controller-manager" containerID="cri-o://e3ab5135381411334039023b381683fb63d36e5ab4b56c6f4ac030bf964a1736" gracePeriod=30 Jan 27 07:54:44 crc kubenswrapper[4787]: I0127 07:54:44.332169 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-cff864fb9-zqlsm" event={"ID":"1696f0ba-1f0c-4f1a-8510-bcdaa31fcb1c","Type":"ContainerStarted","Data":"0b46d3e81ef90d81b3aecbd4aadc3d93c494fbfa41092064abad2218b8d21800"} Jan 27 07:54:44 crc kubenswrapper[4787]: I0127 07:54:44.332204 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-cff864fb9-zqlsm" 
event={"ID":"1696f0ba-1f0c-4f1a-8510-bcdaa31fcb1c","Type":"ContainerStarted","Data":"1e78828407f5d5d949efad0e2b9135a9aabece8423effd00f64caebe0c4de681"} Jan 27 07:54:44 crc kubenswrapper[4787]: I0127 07:54:44.332294 4787 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-cff864fb9-zqlsm" podUID="1696f0ba-1f0c-4f1a-8510-bcdaa31fcb1c" containerName="route-controller-manager" containerID="cri-o://0b46d3e81ef90d81b3aecbd4aadc3d93c494fbfa41092064abad2218b8d21800" gracePeriod=30 Jan 27 07:54:44 crc kubenswrapper[4787]: I0127 07:54:44.332380 4787 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-cff864fb9-zqlsm" Jan 27 07:54:44 crc kubenswrapper[4787]: I0127 07:54:44.342607 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-vws75" event={"ID":"3969f21f-ab36-49b4-9a9c-02cf19e65ad0","Type":"ContainerStarted","Data":"385ed217c7bbaf2e23de994a50442d796713a2dd7491664da043cf4eaa88f48f"} Jan 27 07:54:44 crc kubenswrapper[4787]: I0127 07:54:44.342658 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-vws75" event={"ID":"3969f21f-ab36-49b4-9a9c-02cf19e65ad0","Type":"ContainerStarted","Data":"f297f5ad07c994b453d6aceadb7664527c76c902648e36505ce47ccbd9bf9b79"} Jan 27 07:54:44 crc kubenswrapper[4787]: I0127 07:54:44.342668 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-vws75" event={"ID":"3969f21f-ab36-49b4-9a9c-02cf19e65ad0","Type":"ContainerStarted","Data":"f9ba1717fd7cd617b8a2ba404bfc59d5b5cb58688f589de88190478a9ee60ca6"} Jan 27 07:54:44 crc kubenswrapper[4787]: I0127 07:54:44.349881 4787 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-7666b944cf-w5xrh" Jan 27 07:54:44 crc kubenswrapper[4787]: I0127 07:54:44.389612 4787 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/network-metrics-daemon-vws75" podStartSLOduration=165.389592315 podStartE2EDuration="2m45.389592315s" podCreationTimestamp="2026-01-27 07:51:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-27 07:54:44.387989859 +0000 UTC m=+190.040345341" watchObservedRunningTime="2026-01-27 07:54:44.389592315 +0000 UTC m=+190.041947797" Jan 27 07:54:44 crc kubenswrapper[4787]: I0127 07:54:44.414502 4787 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-cff864fb9-zqlsm" podStartSLOduration=22.414481615 podStartE2EDuration="22.414481615s" podCreationTimestamp="2026-01-27 07:54:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-27 07:54:44.410374141 +0000 UTC m=+190.062729633" watchObservedRunningTime="2026-01-27 07:54:44.414481615 +0000 UTC m=+190.066837107" Jan 27 07:54:44 crc kubenswrapper[4787]: I0127 07:54:44.432792 4787 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-7666b944cf-w5xrh" podStartSLOduration=22.432762523 podStartE2EDuration="22.432762523s" podCreationTimestamp="2026-01-27 07:54:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 
00:00:00 +0000 UTC" observedRunningTime="2026-01-27 07:54:44.428974186 +0000 UTC m=+190.081329678" watchObservedRunningTime="2026-01-27 07:54:44.432762523 +0000 UTC m=+190.085118005" Jan 27 07:54:44 crc kubenswrapper[4787]: I0127 07:54:44.527501 4787 patch_prober.go:28] interesting pod/route-controller-manager-cff864fb9-zqlsm container/route-controller-manager namespace/openshift-route-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.54:8443/healthz\": read tcp 10.217.0.2:46314->10.217.0.54:8443: read: connection reset by peer" start-of-body= Jan 27 07:54:44 crc kubenswrapper[4787]: I0127 07:54:44.527589 4787 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-route-controller-manager/route-controller-manager-cff864fb9-zqlsm" podUID="1696f0ba-1f0c-4f1a-8510-bcdaa31fcb1c" containerName="route-controller-manager" probeResult="failure" output="Get \"https://10.217.0.54:8443/healthz\": read tcp 10.217.0.2:46314->10.217.0.54:8443: read: connection reset by peer" Jan 27 07:54:44 crc kubenswrapper[4787]: I0127 07:54:44.762636 4787 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-7666b944cf-w5xrh" Jan 27 07:54:44 crc kubenswrapper[4787]: I0127 07:54:44.782854 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-route-controller-manager_route-controller-manager-cff864fb9-zqlsm_1696f0ba-1f0c-4f1a-8510-bcdaa31fcb1c/route-controller-manager/0.log" Jan 27 07:54:44 crc kubenswrapper[4787]: I0127 07:54:44.783008 4787 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-cff864fb9-zqlsm" Jan 27 07:54:44 crc kubenswrapper[4787]: I0127 07:54:44.829893 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/9af0e159-7fb4-47fe-8d5a-fd19fee5a744-client-ca\") pod \"9af0e159-7fb4-47fe-8d5a-fd19fee5a744\" (UID: \"9af0e159-7fb4-47fe-8d5a-fd19fee5a744\") " Jan 27 07:54:44 crc kubenswrapper[4787]: I0127 07:54:44.829966 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1696f0ba-1f0c-4f1a-8510-bcdaa31fcb1c-serving-cert\") pod \"1696f0ba-1f0c-4f1a-8510-bcdaa31fcb1c\" (UID: \"1696f0ba-1f0c-4f1a-8510-bcdaa31fcb1c\") " Jan 27 07:54:44 crc kubenswrapper[4787]: I0127 07:54:44.829997 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5c5m4\" (UniqueName: \"kubernetes.io/projected/9af0e159-7fb4-47fe-8d5a-fd19fee5a744-kube-api-access-5c5m4\") pod \"9af0e159-7fb4-47fe-8d5a-fd19fee5a744\" (UID: \"9af0e159-7fb4-47fe-8d5a-fd19fee5a744\") " Jan 27 07:54:44 crc kubenswrapper[4787]: I0127 07:54:44.830068 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9af0e159-7fb4-47fe-8d5a-fd19fee5a744-serving-cert\") pod \"9af0e159-7fb4-47fe-8d5a-fd19fee5a744\" (UID: \"9af0e159-7fb4-47fe-8d5a-fd19fee5a744\") " Jan 27 07:54:44 crc kubenswrapper[4787]: I0127 07:54:44.830158 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/1696f0ba-1f0c-4f1a-8510-bcdaa31fcb1c-client-ca\") pod \"1696f0ba-1f0c-4f1a-8510-bcdaa31fcb1c\" (UID: \"1696f0ba-1f0c-4f1a-8510-bcdaa31fcb1c\") " Jan 27 07:54:44 crc kubenswrapper[4787]: I0127 07:54:44.830187 4787 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9af0e159-7fb4-47fe-8d5a-fd19fee5a744-config\") pod \"9af0e159-7fb4-47fe-8d5a-fd19fee5a744\" (UID: \"9af0e159-7fb4-47fe-8d5a-fd19fee5a744\") " Jan 27 07:54:44 crc kubenswrapper[4787]: I0127 07:54:44.830236 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/9af0e159-7fb4-47fe-8d5a-fd19fee5a744-proxy-ca-bundles\") pod \"9af0e159-7fb4-47fe-8d5a-fd19fee5a744\" (UID: \"9af0e159-7fb4-47fe-8d5a-fd19fee5a744\") " Jan 27 07:54:44 crc kubenswrapper[4787]: I0127 07:54:44.830282 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1696f0ba-1f0c-4f1a-8510-bcdaa31fcb1c-config\") pod \"1696f0ba-1f0c-4f1a-8510-bcdaa31fcb1c\" (UID: \"1696f0ba-1f0c-4f1a-8510-bcdaa31fcb1c\") " Jan 27 07:54:44 crc kubenswrapper[4787]: I0127 07:54:44.830338 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vlktw\" (UniqueName: \"kubernetes.io/projected/1696f0ba-1f0c-4f1a-8510-bcdaa31fcb1c-kube-api-access-vlktw\") pod \"1696f0ba-1f0c-4f1a-8510-bcdaa31fcb1c\" (UID: \"1696f0ba-1f0c-4f1a-8510-bcdaa31fcb1c\") " Jan 27 07:54:44 crc kubenswrapper[4787]: I0127 07:54:44.832455 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9af0e159-7fb4-47fe-8d5a-fd19fee5a744-config" (OuterVolumeSpecName: "config") pod "9af0e159-7fb4-47fe-8d5a-fd19fee5a744" (UID: "9af0e159-7fb4-47fe-8d5a-fd19fee5a744"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 27 07:54:44 crc kubenswrapper[4787]: I0127 07:54:44.832495 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9af0e159-7fb4-47fe-8d5a-fd19fee5a744-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "9af0e159-7fb4-47fe-8d5a-fd19fee5a744" (UID: "9af0e159-7fb4-47fe-8d5a-fd19fee5a744"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 27 07:54:44 crc kubenswrapper[4787]: I0127 07:54:44.832930 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9af0e159-7fb4-47fe-8d5a-fd19fee5a744-client-ca" (OuterVolumeSpecName: "client-ca") pod "9af0e159-7fb4-47fe-8d5a-fd19fee5a744" (UID: "9af0e159-7fb4-47fe-8d5a-fd19fee5a744"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 27 07:54:44 crc kubenswrapper[4787]: I0127 07:54:44.835833 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1696f0ba-1f0c-4f1a-8510-bcdaa31fcb1c-config" (OuterVolumeSpecName: "config") pod "1696f0ba-1f0c-4f1a-8510-bcdaa31fcb1c" (UID: "1696f0ba-1f0c-4f1a-8510-bcdaa31fcb1c"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 27 07:54:44 crc kubenswrapper[4787]: I0127 07:54:44.836026 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1696f0ba-1f0c-4f1a-8510-bcdaa31fcb1c-client-ca" (OuterVolumeSpecName: "client-ca") pod "1696f0ba-1f0c-4f1a-8510-bcdaa31fcb1c" (UID: "1696f0ba-1f0c-4f1a-8510-bcdaa31fcb1c"). InnerVolumeSpecName "client-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 27 07:54:44 crc kubenswrapper[4787]: I0127 07:54:44.838884 4787 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-7598866b8d-sz455"] Jan 27 07:54:44 crc kubenswrapper[4787]: E0127 07:54:44.839298 4787 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1696f0ba-1f0c-4f1a-8510-bcdaa31fcb1c" containerName="route-controller-manager" Jan 27 07:54:44 crc kubenswrapper[4787]: I0127 07:54:44.839312 4787 state_mem.go:107] "Deleted CPUSet assignment" podUID="1696f0ba-1f0c-4f1a-8510-bcdaa31fcb1c" containerName="route-controller-manager" Jan 27 07:54:44 crc kubenswrapper[4787]: E0127 07:54:44.839328 4787 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9af0e159-7fb4-47fe-8d5a-fd19fee5a744" containerName="controller-manager" Jan 27 07:54:44 crc kubenswrapper[4787]: I0127 07:54:44.839336 4787 state_mem.go:107] "Deleted CPUSet assignment" podUID="9af0e159-7fb4-47fe-8d5a-fd19fee5a744" containerName="controller-manager" Jan 27 07:54:44 crc kubenswrapper[4787]: I0127 07:54:44.839457 4787 memory_manager.go:354] "RemoveStaleState removing state" podUID="1696f0ba-1f0c-4f1a-8510-bcdaa31fcb1c" containerName="route-controller-manager" Jan 27 07:54:44 crc kubenswrapper[4787]: I0127 07:54:44.839474 4787 memory_manager.go:354] "RemoveStaleState removing state" podUID="9af0e159-7fb4-47fe-8d5a-fd19fee5a744" containerName="controller-manager" Jan 27 07:54:44 crc kubenswrapper[4787]: I0127 07:54:44.840025 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-7598866b8d-sz455" Jan 27 07:54:44 crc kubenswrapper[4787]: I0127 07:54:44.841326 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9af0e159-7fb4-47fe-8d5a-fd19fee5a744-kube-api-access-5c5m4" (OuterVolumeSpecName: "kube-api-access-5c5m4") pod "9af0e159-7fb4-47fe-8d5a-fd19fee5a744" (UID: "9af0e159-7fb4-47fe-8d5a-fd19fee5a744"). InnerVolumeSpecName "kube-api-access-5c5m4". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 27 07:54:44 crc kubenswrapper[4787]: I0127 07:54:44.841382 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1696f0ba-1f0c-4f1a-8510-bcdaa31fcb1c-kube-api-access-vlktw" (OuterVolumeSpecName: "kube-api-access-vlktw") pod "1696f0ba-1f0c-4f1a-8510-bcdaa31fcb1c" (UID: "1696f0ba-1f0c-4f1a-8510-bcdaa31fcb1c"). InnerVolumeSpecName "kube-api-access-vlktw". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 27 07:54:44 crc kubenswrapper[4787]: I0127 07:54:44.842071 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1696f0ba-1f0c-4f1a-8510-bcdaa31fcb1c-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1696f0ba-1f0c-4f1a-8510-bcdaa31fcb1c" (UID: "1696f0ba-1f0c-4f1a-8510-bcdaa31fcb1c"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 27 07:54:44 crc kubenswrapper[4787]: I0127 07:54:44.845621 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9af0e159-7fb4-47fe-8d5a-fd19fee5a744-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "9af0e159-7fb4-47fe-8d5a-fd19fee5a744" (UID: "9af0e159-7fb4-47fe-8d5a-fd19fee5a744"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 27 07:54:44 crc kubenswrapper[4787]: I0127 07:54:44.854075 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-7598866b8d-sz455"] Jan 27 07:54:44 crc kubenswrapper[4787]: I0127 07:54:44.932570 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z6dnj\" (UniqueName: \"kubernetes.io/projected/1a623b08-d78c-4d91-bff0-6535d713eb2c-kube-api-access-z6dnj\") pod \"controller-manager-7598866b8d-sz455\" (UID: \"1a623b08-d78c-4d91-bff0-6535d713eb2c\") " pod="openshift-controller-manager/controller-manager-7598866b8d-sz455" Jan 27 07:54:44 crc kubenswrapper[4787]: I0127 07:54:44.932642 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1a623b08-d78c-4d91-bff0-6535d713eb2c-serving-cert\") pod \"controller-manager-7598866b8d-sz455\" (UID: \"1a623b08-d78c-4d91-bff0-6535d713eb2c\") " pod="openshift-controller-manager/controller-manager-7598866b8d-sz455" Jan 27 07:54:44 crc kubenswrapper[4787]: I0127 07:54:44.932693 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1a623b08-d78c-4d91-bff0-6535d713eb2c-config\") pod \"controller-manager-7598866b8d-sz455\" (UID: \"1a623b08-d78c-4d91-bff0-6535d713eb2c\") " pod="openshift-controller-manager/controller-manager-7598866b8d-sz455" Jan 27 07:54:44 crc kubenswrapper[4787]: I0127 07:54:44.932717 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/1a623b08-d78c-4d91-bff0-6535d713eb2c-proxy-ca-bundles\") pod \"controller-manager-7598866b8d-sz455\" (UID: \"1a623b08-d78c-4d91-bff0-6535d713eb2c\") " pod="openshift-controller-manager/controller-manager-7598866b8d-sz455" Jan 27 07:54:44 crc kubenswrapper[4787]: I0127 07:54:44.932741 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/1a623b08-d78c-4d91-bff0-6535d713eb2c-client-ca\") pod \"controller-manager-7598866b8d-sz455\" (UID: \"1a623b08-d78c-4d91-bff0-6535d713eb2c\") " pod="openshift-controller-manager/controller-manager-7598866b8d-sz455" Jan 27 07:54:44 crc kubenswrapper[4787]: I0127 07:54:44.932813 4787 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9af0e159-7fb4-47fe-8d5a-fd19fee5a744-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 27 07:54:44 crc kubenswrapper[4787]: I0127 07:54:44.932827 4787 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/1696f0ba-1f0c-4f1a-8510-bcdaa31fcb1c-client-ca\") on node \"crc\" DevicePath \"\"" Jan 27 07:54:44 crc kubenswrapper[4787]: I0127 07:54:44.932839 4787 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9af0e159-7fb4-47fe-8d5a-fd19fee5a744-config\") on node \"crc\" DevicePath \"\"" Jan 27 07:54:44 crc kubenswrapper[4787]: I0127 07:54:44.932865 4787 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/9af0e159-7fb4-47fe-8d5a-fd19fee5a744-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Jan 27 07:54:44 crc kubenswrapper[4787]: I0127 07:54:44.932879 4787 
reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1696f0ba-1f0c-4f1a-8510-bcdaa31fcb1c-config\") on node \"crc\" DevicePath \"\"" Jan 27 07:54:44 crc kubenswrapper[4787]: I0127 07:54:44.932892 4787 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vlktw\" (UniqueName: \"kubernetes.io/projected/1696f0ba-1f0c-4f1a-8510-bcdaa31fcb1c-kube-api-access-vlktw\") on node \"crc\" DevicePath \"\"" Jan 27 07:54:44 crc kubenswrapper[4787]: I0127 07:54:44.932907 4787 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/9af0e159-7fb4-47fe-8d5a-fd19fee5a744-client-ca\") on node \"crc\" DevicePath \"\"" Jan 27 07:54:44 crc kubenswrapper[4787]: I0127 07:54:44.932919 4787 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1696f0ba-1f0c-4f1a-8510-bcdaa31fcb1c-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 27 07:54:44 crc kubenswrapper[4787]: I0127 07:54:44.932932 4787 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5c5m4\" (UniqueName: \"kubernetes.io/projected/9af0e159-7fb4-47fe-8d5a-fd19fee5a744-kube-api-access-5c5m4\") on node \"crc\" DevicePath \"\"" Jan 27 07:54:45 crc kubenswrapper[4787]: I0127 07:54:45.034300 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z6dnj\" (UniqueName: \"kubernetes.io/projected/1a623b08-d78c-4d91-bff0-6535d713eb2c-kube-api-access-z6dnj\") pod \"controller-manager-7598866b8d-sz455\" (UID: \"1a623b08-d78c-4d91-bff0-6535d713eb2c\") " pod="openshift-controller-manager/controller-manager-7598866b8d-sz455" Jan 27 07:54:45 crc kubenswrapper[4787]: I0127 07:54:45.034368 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1a623b08-d78c-4d91-bff0-6535d713eb2c-serving-cert\") pod \"controller-manager-7598866b8d-sz455\" (UID: \"1a623b08-d78c-4d91-bff0-6535d713eb2c\") " pod="openshift-controller-manager/controller-manager-7598866b8d-sz455" Jan 27 07:54:45 crc kubenswrapper[4787]: I0127 07:54:45.034413 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1a623b08-d78c-4d91-bff0-6535d713eb2c-config\") pod \"controller-manager-7598866b8d-sz455\" (UID: \"1a623b08-d78c-4d91-bff0-6535d713eb2c\") " pod="openshift-controller-manager/controller-manager-7598866b8d-sz455" Jan 27 07:54:45 crc kubenswrapper[4787]: I0127 07:54:45.034445 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/1a623b08-d78c-4d91-bff0-6535d713eb2c-proxy-ca-bundles\") pod \"controller-manager-7598866b8d-sz455\" (UID: \"1a623b08-d78c-4d91-bff0-6535d713eb2c\") " pod="openshift-controller-manager/controller-manager-7598866b8d-sz455" Jan 27 07:54:45 crc kubenswrapper[4787]: I0127 07:54:45.034465 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/1a623b08-d78c-4d91-bff0-6535d713eb2c-client-ca\") pod \"controller-manager-7598866b8d-sz455\" (UID: \"1a623b08-d78c-4d91-bff0-6535d713eb2c\") " pod="openshift-controller-manager/controller-manager-7598866b8d-sz455" Jan 27 07:54:45 crc kubenswrapper[4787]: I0127 07:54:45.036075 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: 
\"kubernetes.io/configmap/1a623b08-d78c-4d91-bff0-6535d713eb2c-client-ca\") pod \"controller-manager-7598866b8d-sz455\" (UID: \"1a623b08-d78c-4d91-bff0-6535d713eb2c\") " pod="openshift-controller-manager/controller-manager-7598866b8d-sz455" Jan 27 07:54:45 crc kubenswrapper[4787]: I0127 07:54:45.036393 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1a623b08-d78c-4d91-bff0-6535d713eb2c-config\") pod \"controller-manager-7598866b8d-sz455\" (UID: \"1a623b08-d78c-4d91-bff0-6535d713eb2c\") " pod="openshift-controller-manager/controller-manager-7598866b8d-sz455" Jan 27 07:54:45 crc kubenswrapper[4787]: I0127 07:54:45.036774 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/1a623b08-d78c-4d91-bff0-6535d713eb2c-proxy-ca-bundles\") pod \"controller-manager-7598866b8d-sz455\" (UID: \"1a623b08-d78c-4d91-bff0-6535d713eb2c\") " pod="openshift-controller-manager/controller-manager-7598866b8d-sz455" Jan 27 07:54:45 crc kubenswrapper[4787]: I0127 07:54:45.039939 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1a623b08-d78c-4d91-bff0-6535d713eb2c-serving-cert\") pod \"controller-manager-7598866b8d-sz455\" (UID: \"1a623b08-d78c-4d91-bff0-6535d713eb2c\") " pod="openshift-controller-manager/controller-manager-7598866b8d-sz455" Jan 27 07:54:45 crc kubenswrapper[4787]: I0127 07:54:45.055395 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z6dnj\" (UniqueName: \"kubernetes.io/projected/1a623b08-d78c-4d91-bff0-6535d713eb2c-kube-api-access-z6dnj\") pod \"controller-manager-7598866b8d-sz455\" (UID: \"1a623b08-d78c-4d91-bff0-6535d713eb2c\") " pod="openshift-controller-manager/controller-manager-7598866b8d-sz455" Jan 27 07:54:45 crc kubenswrapper[4787]: I0127 07:54:45.164817 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-7598866b8d-sz455" Jan 27 07:54:45 crc kubenswrapper[4787]: I0127 07:54:45.377092 4787 generic.go:334] "Generic (PLEG): container finished" podID="9af0e159-7fb4-47fe-8d5a-fd19fee5a744" containerID="e3ab5135381411334039023b381683fb63d36e5ab4b56c6f4ac030bf964a1736" exitCode=0 Jan 27 07:54:45 crc kubenswrapper[4787]: I0127 07:54:45.377161 4787 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-7666b944cf-w5xrh" Jan 27 07:54:45 crc kubenswrapper[4787]: I0127 07:54:45.377197 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-7666b944cf-w5xrh" event={"ID":"9af0e159-7fb4-47fe-8d5a-fd19fee5a744","Type":"ContainerDied","Data":"e3ab5135381411334039023b381683fb63d36e5ab4b56c6f4ac030bf964a1736"} Jan 27 07:54:45 crc kubenswrapper[4787]: I0127 07:54:45.377312 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-7666b944cf-w5xrh" event={"ID":"9af0e159-7fb4-47fe-8d5a-fd19fee5a744","Type":"ContainerDied","Data":"884316bdef089f92bdbee924916583c28a60a32993cc5267435c350c9bb28052"} Jan 27 07:54:45 crc kubenswrapper[4787]: I0127 07:54:45.377347 4787 scope.go:117] "RemoveContainer" containerID="e3ab5135381411334039023b381683fb63d36e5ab4b56c6f4ac030bf964a1736" Jan 27 07:54:45 crc kubenswrapper[4787]: I0127 07:54:45.382094 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-route-controller-manager_route-controller-manager-cff864fb9-zqlsm_1696f0ba-1f0c-4f1a-8510-bcdaa31fcb1c/route-controller-manager/0.log" Jan 27 07:54:45 crc kubenswrapper[4787]: I0127 07:54:45.382144 4787 generic.go:334] "Generic (PLEG): container finished" podID="1696f0ba-1f0c-4f1a-8510-bcdaa31fcb1c" containerID="0b46d3e81ef90d81b3aecbd4aadc3d93c494fbfa41092064abad2218b8d21800" exitCode=255 Jan 27 07:54:45 crc kubenswrapper[4787]: I0127 07:54:45.382224 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-cff864fb9-zqlsm" event={"ID":"1696f0ba-1f0c-4f1a-8510-bcdaa31fcb1c","Type":"ContainerDied","Data":"0b46d3e81ef90d81b3aecbd4aadc3d93c494fbfa41092064abad2218b8d21800"} Jan 27 07:54:45 crc kubenswrapper[4787]: I0127 07:54:45.382262 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-cff864fb9-zqlsm" event={"ID":"1696f0ba-1f0c-4f1a-8510-bcdaa31fcb1c","Type":"ContainerDied","Data":"1e78828407f5d5d949efad0e2b9135a9aabece8423effd00f64caebe0c4de681"} Jan 27 07:54:45 crc kubenswrapper[4787]: I0127 07:54:45.382224 4787 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-cff864fb9-zqlsm" Jan 27 07:54:45 crc kubenswrapper[4787]: I0127 07:54:45.384418 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-672cg" event={"ID":"2a1ecf2a-6b91-48d7-873e-918dd4e045fc","Type":"ContainerStarted","Data":"be7ffa9db32f83f23699c5d5677bd9eb9428eb33ca38c00529df07ae62a31547"} Jan 27 07:54:45 crc kubenswrapper[4787]: I0127 07:54:45.390329 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-npc57" event={"ID":"7af45749-cd7f-490a-b2f6-bf979ea6467e","Type":"ContainerStarted","Data":"3f0dd71506f96b5f1d29b34337fb17e1db8a01c5d142ebe84263d8b6c18e1d79"} Jan 27 07:54:45 crc kubenswrapper[4787]: I0127 07:54:45.394777 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-hz9l2" event={"ID":"264daa3b-f5d9-407d-8835-35845525329a","Type":"ContainerStarted","Data":"92972139bf2205651890b39b93a8107ed20a0a61ec1230b6775bffc306789804"} Jan 27 07:54:45 crc kubenswrapper[4787]: I0127 07:54:45.397636 4787 generic.go:334] "Generic (PLEG): container finished" podID="5f5e46e1-1252-4a0a-ad11-8d4236f3759b" containerID="1309a0eb542609f82174c456dbe308ca94a8f5093a7b41ff79e5822733bbd396" exitCode=0 Jan 27 07:54:45 crc kubenswrapper[4787]: I0127 07:54:45.397795 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-qjd6t" event={"ID":"5f5e46e1-1252-4a0a-ad11-8d4236f3759b","Type":"ContainerDied","Data":"1309a0eb542609f82174c456dbe308ca94a8f5093a7b41ff79e5822733bbd396"} Jan 27 07:54:45 crc kubenswrapper[4787]: I0127 07:54:45.407588 4787 scope.go:117] "RemoveContainer" containerID="e3ab5135381411334039023b381683fb63d36e5ab4b56c6f4ac030bf964a1736" Jan 27 07:54:45 crc kubenswrapper[4787]: E0127 07:54:45.409139 4787 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e3ab5135381411334039023b381683fb63d36e5ab4b56c6f4ac030bf964a1736\": container with ID starting with e3ab5135381411334039023b381683fb63d36e5ab4b56c6f4ac030bf964a1736 not found: ID does not exist" containerID="e3ab5135381411334039023b381683fb63d36e5ab4b56c6f4ac030bf964a1736" Jan 27 07:54:45 crc kubenswrapper[4787]: I0127 07:54:45.409197 4787 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e3ab5135381411334039023b381683fb63d36e5ab4b56c6f4ac030bf964a1736"} err="failed to get container status \"e3ab5135381411334039023b381683fb63d36e5ab4b56c6f4ac030bf964a1736\": rpc error: code = NotFound desc = could not find container \"e3ab5135381411334039023b381683fb63d36e5ab4b56c6f4ac030bf964a1736\": container with ID starting with e3ab5135381411334039023b381683fb63d36e5ab4b56c6f4ac030bf964a1736 not found: ID does not exist" Jan 27 07:54:45 crc kubenswrapper[4787]: I0127 07:54:45.409262 4787 scope.go:117] "RemoveContainer" containerID="0b46d3e81ef90d81b3aecbd4aadc3d93c494fbfa41092064abad2218b8d21800" Jan 27 07:54:45 crc kubenswrapper[4787]: I0127 07:54:45.417186 4787 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-672cg" podStartSLOduration=3.190670525 podStartE2EDuration="41.417163958s" podCreationTimestamp="2026-01-27 07:54:04 +0000 UTC" firstStartedPulling="2026-01-27 07:54:06.854581318 +0000 UTC m=+152.506936810" lastFinishedPulling="2026-01-27 07:54:45.081074751 +0000 UTC m=+190.733430243" 
observedRunningTime="2026-01-27 07:54:45.416592895 +0000 UTC m=+191.068948407" watchObservedRunningTime="2026-01-27 07:54:45.417163958 +0000 UTC m=+191.069519470" Jan 27 07:54:45 crc kubenswrapper[4787]: I0127 07:54:45.438688 4787 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-npc57" podStartSLOduration=3.202571181 podStartE2EDuration="41.43865154s" podCreationTimestamp="2026-01-27 07:54:04 +0000 UTC" firstStartedPulling="2026-01-27 07:54:06.828324421 +0000 UTC m=+152.480679913" lastFinishedPulling="2026-01-27 07:54:45.06440478 +0000 UTC m=+190.716760272" observedRunningTime="2026-01-27 07:54:45.434631407 +0000 UTC m=+191.086986919" watchObservedRunningTime="2026-01-27 07:54:45.43865154 +0000 UTC m=+191.091007032" Jan 27 07:54:45 crc kubenswrapper[4787]: I0127 07:54:45.440387 4787 scope.go:117] "RemoveContainer" containerID="0b46d3e81ef90d81b3aecbd4aadc3d93c494fbfa41092064abad2218b8d21800" Jan 27 07:54:45 crc kubenswrapper[4787]: E0127 07:54:45.441263 4787 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0b46d3e81ef90d81b3aecbd4aadc3d93c494fbfa41092064abad2218b8d21800\": container with ID starting with 0b46d3e81ef90d81b3aecbd4aadc3d93c494fbfa41092064abad2218b8d21800 not found: ID does not exist" containerID="0b46d3e81ef90d81b3aecbd4aadc3d93c494fbfa41092064abad2218b8d21800" Jan 27 07:54:45 crc kubenswrapper[4787]: I0127 07:54:45.441302 4787 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0b46d3e81ef90d81b3aecbd4aadc3d93c494fbfa41092064abad2218b8d21800"} err="failed to get container status \"0b46d3e81ef90d81b3aecbd4aadc3d93c494fbfa41092064abad2218b8d21800\": rpc error: code = NotFound desc = could not find container \"0b46d3e81ef90d81b3aecbd4aadc3d93c494fbfa41092064abad2218b8d21800\": container with ID starting with 0b46d3e81ef90d81b3aecbd4aadc3d93c494fbfa41092064abad2218b8d21800 not found: ID does not exist" Jan 27 07:54:45 crc kubenswrapper[4787]: I0127 07:54:45.450617 4787 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-7666b944cf-w5xrh"] Jan 27 07:54:45 crc kubenswrapper[4787]: I0127 07:54:45.454068 4787 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-7666b944cf-w5xrh"] Jan 27 07:54:45 crc kubenswrapper[4787]: I0127 07:54:45.468100 4787 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-cff864fb9-zqlsm"] Jan 27 07:54:45 crc kubenswrapper[4787]: I0127 07:54:45.475475 4787 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-cff864fb9-zqlsm"] Jan 27 07:54:45 crc kubenswrapper[4787]: I0127 07:54:45.484270 4787 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-hz9l2" podStartSLOduration=3.582884563 podStartE2EDuration="39.484246593s" podCreationTimestamp="2026-01-27 07:54:06 +0000 UTC" firstStartedPulling="2026-01-27 07:54:08.967714522 +0000 UTC m=+154.620070014" lastFinishedPulling="2026-01-27 07:54:44.869076552 +0000 UTC m=+190.521432044" observedRunningTime="2026-01-27 07:54:45.481342026 +0000 UTC m=+191.133697518" watchObservedRunningTime="2026-01-27 07:54:45.484246593 +0000 UTC m=+191.136602095" Jan 27 07:54:45 crc kubenswrapper[4787]: I0127 07:54:45.668206 4787 kubelet.go:2428] "SyncLoop UPDATE" 
source="api" pods=["openshift-controller-manager/controller-manager-7598866b8d-sz455"] Jan 27 07:54:46 crc kubenswrapper[4787]: I0127 07:54:46.406667 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-7598866b8d-sz455" event={"ID":"1a623b08-d78c-4d91-bff0-6535d713eb2c","Type":"ContainerStarted","Data":"ec5d5918617f4cf77b4f25c6da400fa3ea9bb3863c461eac264ef2c1e57344ea"} Jan 27 07:54:46 crc kubenswrapper[4787]: I0127 07:54:46.406737 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-7598866b8d-sz455" event={"ID":"1a623b08-d78c-4d91-bff0-6535d713eb2c","Type":"ContainerStarted","Data":"3da2e262993ac94cc9dc72bad86247ba0291e69ae87e9695f743aab86626a5a0"} Jan 27 07:54:46 crc kubenswrapper[4787]: I0127 07:54:46.407069 4787 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-7598866b8d-sz455" Jan 27 07:54:46 crc kubenswrapper[4787]: I0127 07:54:46.431476 4787 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-7598866b8d-sz455" Jan 27 07:54:46 crc kubenswrapper[4787]: I0127 07:54:46.436413 4787 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-7598866b8d-sz455" podStartSLOduration=4.436396311 podStartE2EDuration="4.436396311s" podCreationTimestamp="2026-01-27 07:54:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-27 07:54:46.433269259 +0000 UTC m=+192.085624761" watchObservedRunningTime="2026-01-27 07:54:46.436396311 +0000 UTC m=+192.088751813" Jan 27 07:54:46 crc kubenswrapper[4787]: I0127 07:54:46.808098 4787 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-774cb7c4cb-j9mkl"] Jan 27 07:54:46 crc kubenswrapper[4787]: I0127 07:54:46.808876 4787 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-774cb7c4cb-j9mkl" Jan 27 07:54:46 crc kubenswrapper[4787]: I0127 07:54:46.811249 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Jan 27 07:54:46 crc kubenswrapper[4787]: I0127 07:54:46.811686 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Jan 27 07:54:46 crc kubenswrapper[4787]: I0127 07:54:46.816444 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Jan 27 07:54:46 crc kubenswrapper[4787]: I0127 07:54:46.816580 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Jan 27 07:54:46 crc kubenswrapper[4787]: I0127 07:54:46.816664 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Jan 27 07:54:46 crc kubenswrapper[4787]: I0127 07:54:46.816785 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Jan 27 07:54:46 crc kubenswrapper[4787]: I0127 07:54:46.827883 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-774cb7c4cb-j9mkl"] Jan 27 07:54:46 crc kubenswrapper[4787]: I0127 07:54:46.861951 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6381e89b-99e4-480e-9ab0-14a99d22b889-config\") pod \"route-controller-manager-774cb7c4cb-j9mkl\" (UID: \"6381e89b-99e4-480e-9ab0-14a99d22b889\") " pod="openshift-route-controller-manager/route-controller-manager-774cb7c4cb-j9mkl" Jan 27 07:54:46 crc kubenswrapper[4787]: I0127 07:54:46.862052 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qplk2\" (UniqueName: \"kubernetes.io/projected/6381e89b-99e4-480e-9ab0-14a99d22b889-kube-api-access-qplk2\") pod \"route-controller-manager-774cb7c4cb-j9mkl\" (UID: \"6381e89b-99e4-480e-9ab0-14a99d22b889\") " pod="openshift-route-controller-manager/route-controller-manager-774cb7c4cb-j9mkl" Jan 27 07:54:46 crc kubenswrapper[4787]: I0127 07:54:46.862087 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6381e89b-99e4-480e-9ab0-14a99d22b889-serving-cert\") pod \"route-controller-manager-774cb7c4cb-j9mkl\" (UID: \"6381e89b-99e4-480e-9ab0-14a99d22b889\") " pod="openshift-route-controller-manager/route-controller-manager-774cb7c4cb-j9mkl" Jan 27 07:54:46 crc kubenswrapper[4787]: I0127 07:54:46.862147 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/6381e89b-99e4-480e-9ab0-14a99d22b889-client-ca\") pod \"route-controller-manager-774cb7c4cb-j9mkl\" (UID: \"6381e89b-99e4-480e-9ab0-14a99d22b889\") " pod="openshift-route-controller-manager/route-controller-manager-774cb7c4cb-j9mkl" Jan 27 07:54:46 crc kubenswrapper[4787]: I0127 07:54:46.963200 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6381e89b-99e4-480e-9ab0-14a99d22b889-config\") pod 
\"route-controller-manager-774cb7c4cb-j9mkl\" (UID: \"6381e89b-99e4-480e-9ab0-14a99d22b889\") " pod="openshift-route-controller-manager/route-controller-manager-774cb7c4cb-j9mkl" Jan 27 07:54:46 crc kubenswrapper[4787]: I0127 07:54:46.963719 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qplk2\" (UniqueName: \"kubernetes.io/projected/6381e89b-99e4-480e-9ab0-14a99d22b889-kube-api-access-qplk2\") pod \"route-controller-manager-774cb7c4cb-j9mkl\" (UID: \"6381e89b-99e4-480e-9ab0-14a99d22b889\") " pod="openshift-route-controller-manager/route-controller-manager-774cb7c4cb-j9mkl" Jan 27 07:54:46 crc kubenswrapper[4787]: I0127 07:54:46.963763 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6381e89b-99e4-480e-9ab0-14a99d22b889-serving-cert\") pod \"route-controller-manager-774cb7c4cb-j9mkl\" (UID: \"6381e89b-99e4-480e-9ab0-14a99d22b889\") " pod="openshift-route-controller-manager/route-controller-manager-774cb7c4cb-j9mkl" Jan 27 07:54:46 crc kubenswrapper[4787]: I0127 07:54:46.963801 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/6381e89b-99e4-480e-9ab0-14a99d22b889-client-ca\") pod \"route-controller-manager-774cb7c4cb-j9mkl\" (UID: \"6381e89b-99e4-480e-9ab0-14a99d22b889\") " pod="openshift-route-controller-manager/route-controller-manager-774cb7c4cb-j9mkl" Jan 27 07:54:46 crc kubenswrapper[4787]: I0127 07:54:46.964508 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6381e89b-99e4-480e-9ab0-14a99d22b889-config\") pod \"route-controller-manager-774cb7c4cb-j9mkl\" (UID: \"6381e89b-99e4-480e-9ab0-14a99d22b889\") " pod="openshift-route-controller-manager/route-controller-manager-774cb7c4cb-j9mkl" Jan 27 07:54:46 crc kubenswrapper[4787]: I0127 07:54:46.964569 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/6381e89b-99e4-480e-9ab0-14a99d22b889-client-ca\") pod \"route-controller-manager-774cb7c4cb-j9mkl\" (UID: \"6381e89b-99e4-480e-9ab0-14a99d22b889\") " pod="openshift-route-controller-manager/route-controller-manager-774cb7c4cb-j9mkl" Jan 27 07:54:46 crc kubenswrapper[4787]: I0127 07:54:46.971643 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6381e89b-99e4-480e-9ab0-14a99d22b889-serving-cert\") pod \"route-controller-manager-774cb7c4cb-j9mkl\" (UID: \"6381e89b-99e4-480e-9ab0-14a99d22b889\") " pod="openshift-route-controller-manager/route-controller-manager-774cb7c4cb-j9mkl" Jan 27 07:54:46 crc kubenswrapper[4787]: I0127 07:54:46.984720 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qplk2\" (UniqueName: \"kubernetes.io/projected/6381e89b-99e4-480e-9ab0-14a99d22b889-kube-api-access-qplk2\") pod \"route-controller-manager-774cb7c4cb-j9mkl\" (UID: \"6381e89b-99e4-480e-9ab0-14a99d22b889\") " pod="openshift-route-controller-manager/route-controller-manager-774cb7c4cb-j9mkl" Jan 27 07:54:47 crc kubenswrapper[4787]: I0127 07:54:47.084237 4787 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1696f0ba-1f0c-4f1a-8510-bcdaa31fcb1c" path="/var/lib/kubelet/pods/1696f0ba-1f0c-4f1a-8510-bcdaa31fcb1c/volumes" Jan 27 07:54:47 crc kubenswrapper[4787]: I0127 07:54:47.085478 4787 kubelet_volumes.go:163] 
"Cleaned up orphaned pod volumes dir" podUID="9af0e159-7fb4-47fe-8d5a-fd19fee5a744" path="/var/lib/kubelet/pods/9af0e159-7fb4-47fe-8d5a-fd19fee5a744/volumes" Jan 27 07:54:47 crc kubenswrapper[4787]: I0127 07:54:47.153410 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-774cb7c4cb-j9mkl" Jan 27 07:54:47 crc kubenswrapper[4787]: I0127 07:54:47.171465 4787 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"] Jan 27 07:54:47 crc kubenswrapper[4787]: I0127 07:54:47.172318 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Jan 27 07:54:47 crc kubenswrapper[4787]: I0127 07:54:47.176503 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n" Jan 27 07:54:47 crc kubenswrapper[4787]: I0127 07:54:47.176595 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt" Jan 27 07:54:47 crc kubenswrapper[4787]: I0127 07:54:47.191666 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"] Jan 27 07:54:47 crc kubenswrapper[4787]: I0127 07:54:47.268387 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/dd23d06a-7017-4d11-a361-c556d5465959-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"dd23d06a-7017-4d11-a361-c556d5465959\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Jan 27 07:54:47 crc kubenswrapper[4787]: I0127 07:54:47.268464 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/dd23d06a-7017-4d11-a361-c556d5465959-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"dd23d06a-7017-4d11-a361-c556d5465959\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Jan 27 07:54:47 crc kubenswrapper[4787]: I0127 07:54:47.274606 4787 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-hz9l2" Jan 27 07:54:47 crc kubenswrapper[4787]: I0127 07:54:47.282743 4787 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-hz9l2" Jan 27 07:54:47 crc kubenswrapper[4787]: I0127 07:54:47.370788 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/dd23d06a-7017-4d11-a361-c556d5465959-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"dd23d06a-7017-4d11-a361-c556d5465959\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Jan 27 07:54:47 crc kubenswrapper[4787]: I0127 07:54:47.371514 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/dd23d06a-7017-4d11-a361-c556d5465959-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"dd23d06a-7017-4d11-a361-c556d5465959\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Jan 27 07:54:47 crc kubenswrapper[4787]: I0127 07:54:47.371693 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/dd23d06a-7017-4d11-a361-c556d5465959-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: 
\"dd23d06a-7017-4d11-a361-c556d5465959\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Jan 27 07:54:47 crc kubenswrapper[4787]: I0127 07:54:47.401673 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/dd23d06a-7017-4d11-a361-c556d5465959-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"dd23d06a-7017-4d11-a361-c556d5465959\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Jan 27 07:54:47 crc kubenswrapper[4787]: I0127 07:54:47.442519 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-qjd6t" event={"ID":"5f5e46e1-1252-4a0a-ad11-8d4236f3759b","Type":"ContainerStarted","Data":"273c3736c041c8021a3e5eb647bdda5506773181a199572370f5be6b85004a04"} Jan 27 07:54:47 crc kubenswrapper[4787]: I0127 07:54:47.469178 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-774cb7c4cb-j9mkl"] Jan 27 07:54:47 crc kubenswrapper[4787]: I0127 07:54:47.469932 4787 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-qjd6t" podStartSLOduration=3.753709912 podStartE2EDuration="40.469662484s" podCreationTimestamp="2026-01-27 07:54:07 +0000 UTC" firstStartedPulling="2026-01-27 07:54:10.032516346 +0000 UTC m=+155.684885719" lastFinishedPulling="2026-01-27 07:54:46.748482799 +0000 UTC m=+192.400838291" observedRunningTime="2026-01-27 07:54:47.465442718 +0000 UTC m=+193.117798220" watchObservedRunningTime="2026-01-27 07:54:47.469662484 +0000 UTC m=+193.122017986" Jan 27 07:54:47 crc kubenswrapper[4787]: W0127 07:54:47.476202 4787 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod6381e89b_99e4_480e_9ab0_14a99d22b889.slice/crio-d01da66c31038ba9bcead586f78d183db79fbd103a10f03106ce884e28edd5a4 WatchSource:0}: Error finding container d01da66c31038ba9bcead586f78d183db79fbd103a10f03106ce884e28edd5a4: Status 404 returned error can't find the container with id d01da66c31038ba9bcead586f78d183db79fbd103a10f03106ce884e28edd5a4 Jan 27 07:54:47 crc kubenswrapper[4787]: I0127 07:54:47.558947 4787 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Jan 27 07:54:47 crc kubenswrapper[4787]: I0127 07:54:47.785311 4787 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-wdppr"] Jan 27 07:54:48 crc kubenswrapper[4787]: I0127 07:54:48.231911 4787 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-qjd6t" Jan 27 07:54:48 crc kubenswrapper[4787]: I0127 07:54:48.232467 4787 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-qjd6t" Jan 27 07:54:48 crc kubenswrapper[4787]: I0127 07:54:48.386017 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"] Jan 27 07:54:48 crc kubenswrapper[4787]: I0127 07:54:48.467212 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-774cb7c4cb-j9mkl" event={"ID":"6381e89b-99e4-480e-9ab0-14a99d22b889","Type":"ContainerStarted","Data":"ab12b522093957a0418fad02bce7b15c7c854402c69a9e25ea60cea48243dfae"} Jan 27 07:54:48 crc kubenswrapper[4787]: I0127 07:54:48.467775 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-774cb7c4cb-j9mkl" event={"ID":"6381e89b-99e4-480e-9ab0-14a99d22b889","Type":"ContainerStarted","Data":"d01da66c31038ba9bcead586f78d183db79fbd103a10f03106ce884e28edd5a4"} Jan 27 07:54:48 crc kubenswrapper[4787]: I0127 07:54:48.467799 4787 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-774cb7c4cb-j9mkl" Jan 27 07:54:48 crc kubenswrapper[4787]: I0127 07:54:48.473415 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"dd23d06a-7017-4d11-a361-c556d5465959","Type":"ContainerStarted","Data":"e2f97ea5734c0c010856c4bb35c11965ffb4340ae9d1a8cd3f54f20186f9c38d"} Jan 27 07:54:48 crc kubenswrapper[4787]: I0127 07:54:48.487720 4787 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-774cb7c4cb-j9mkl" podStartSLOduration=6.487693709 podStartE2EDuration="6.487693709s" podCreationTimestamp="2026-01-27 07:54:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-27 07:54:48.485912448 +0000 UTC m=+194.138267950" watchObservedRunningTime="2026-01-27 07:54:48.487693709 +0000 UTC m=+194.140049201" Jan 27 07:54:48 crc kubenswrapper[4787]: I0127 07:54:48.506108 4787 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-774cb7c4cb-j9mkl" Jan 27 07:54:48 crc kubenswrapper[4787]: I0127 07:54:48.506812 4787 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-marketplace-hz9l2" podUID="264daa3b-f5d9-407d-8835-35845525329a" containerName="registry-server" probeResult="failure" output=< Jan 27 07:54:48 crc kubenswrapper[4787]: timeout: failed to connect service ":50051" within 1s Jan 27 07:54:48 crc kubenswrapper[4787]: > Jan 27 07:54:49 crc kubenswrapper[4787]: I0127 07:54:49.296408 4787 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-qjd6t" podUID="5f5e46e1-1252-4a0a-ad11-8d4236f3759b" containerName="registry-server" 
probeResult="failure" output=< Jan 27 07:54:49 crc kubenswrapper[4787]: timeout: failed to connect service ":50051" within 1s Jan 27 07:54:49 crc kubenswrapper[4787]: > Jan 27 07:54:49 crc kubenswrapper[4787]: I0127 07:54:49.500049 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"dd23d06a-7017-4d11-a361-c556d5465959","Type":"ContainerStarted","Data":"ef83b7d239fca38c8168a0c9a3d2feda0dca8b890e19aa7d945f7a660543828b"} Jan 27 07:54:49 crc kubenswrapper[4787]: I0127 07:54:49.524251 4787 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/revision-pruner-9-crc" podStartSLOduration=2.524225777 podStartE2EDuration="2.524225777s" podCreationTimestamp="2026-01-27 07:54:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-27 07:54:49.522456157 +0000 UTC m=+195.174811639" watchObservedRunningTime="2026-01-27 07:54:49.524225777 +0000 UTC m=+195.176581279" Jan 27 07:54:50 crc kubenswrapper[4787]: I0127 07:54:50.511292 4787 generic.go:334] "Generic (PLEG): container finished" podID="dd23d06a-7017-4d11-a361-c556d5465959" containerID="ef83b7d239fca38c8168a0c9a3d2feda0dca8b890e19aa7d945f7a660543828b" exitCode=0 Jan 27 07:54:50 crc kubenswrapper[4787]: I0127 07:54:50.511523 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"dd23d06a-7017-4d11-a361-c556d5465959","Type":"ContainerDied","Data":"ef83b7d239fca38c8168a0c9a3d2feda0dca8b890e19aa7d945f7a660543828b"} Jan 27 07:54:50 crc kubenswrapper[4787]: I0127 07:54:50.514321 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-ht6h9" event={"ID":"624e9a13-c9c5-4ef3-8628-056bfc65338b","Type":"ContainerStarted","Data":"ec5e3c96098a08d05b7b94cefe4b2f9c131e71b862786c006019c2dec6da5498"} Jan 27 07:54:51 crc kubenswrapper[4787]: I0127 07:54:51.521201 4787 generic.go:334] "Generic (PLEG): container finished" podID="624e9a13-c9c5-4ef3-8628-056bfc65338b" containerID="ec5e3c96098a08d05b7b94cefe4b2f9c131e71b862786c006019c2dec6da5498" exitCode=0 Jan 27 07:54:51 crc kubenswrapper[4787]: I0127 07:54:51.521421 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-ht6h9" event={"ID":"624e9a13-c9c5-4ef3-8628-056bfc65338b","Type":"ContainerDied","Data":"ec5e3c96098a08d05b7b94cefe4b2f9c131e71b862786c006019c2dec6da5498"} Jan 27 07:54:51 crc kubenswrapper[4787]: I0127 07:54:51.805760 4787 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Jan 27 07:54:51 crc kubenswrapper[4787]: I0127 07:54:51.851206 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/dd23d06a-7017-4d11-a361-c556d5465959-kube-api-access\") pod \"dd23d06a-7017-4d11-a361-c556d5465959\" (UID: \"dd23d06a-7017-4d11-a361-c556d5465959\") " Jan 27 07:54:51 crc kubenswrapper[4787]: I0127 07:54:51.851281 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/dd23d06a-7017-4d11-a361-c556d5465959-kubelet-dir\") pod \"dd23d06a-7017-4d11-a361-c556d5465959\" (UID: \"dd23d06a-7017-4d11-a361-c556d5465959\") " Jan 27 07:54:51 crc kubenswrapper[4787]: I0127 07:54:51.854470 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/dd23d06a-7017-4d11-a361-c556d5465959-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "dd23d06a-7017-4d11-a361-c556d5465959" (UID: "dd23d06a-7017-4d11-a361-c556d5465959"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 27 07:54:51 crc kubenswrapper[4787]: I0127 07:54:51.859172 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dd23d06a-7017-4d11-a361-c556d5465959-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "dd23d06a-7017-4d11-a361-c556d5465959" (UID: "dd23d06a-7017-4d11-a361-c556d5465959"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 27 07:54:51 crc kubenswrapper[4787]: I0127 07:54:51.953144 4787 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/dd23d06a-7017-4d11-a361-c556d5465959-kube-api-access\") on node \"crc\" DevicePath \"\"" Jan 27 07:54:51 crc kubenswrapper[4787]: I0127 07:54:51.953193 4787 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/dd23d06a-7017-4d11-a361-c556d5465959-kubelet-dir\") on node \"crc\" DevicePath \"\"" Jan 27 07:54:52 crc kubenswrapper[4787]: I0127 07:54:52.528802 4787 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Jan 27 07:54:52 crc kubenswrapper[4787]: I0127 07:54:52.529298 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"dd23d06a-7017-4d11-a361-c556d5465959","Type":"ContainerDied","Data":"e2f97ea5734c0c010856c4bb35c11965ffb4340ae9d1a8cd3f54f20186f9c38d"} Jan 27 07:54:52 crc kubenswrapper[4787]: I0127 07:54:52.529338 4787 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e2f97ea5734c0c010856c4bb35c11965ffb4340ae9d1a8cd3f54f20186f9c38d" Jan 27 07:54:52 crc kubenswrapper[4787]: I0127 07:54:52.532256 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-ht6h9" event={"ID":"624e9a13-c9c5-4ef3-8628-056bfc65338b","Type":"ContainerStarted","Data":"c1c232a68764a7e2a2ab4410f7d9d45a3f03c3775264008e4c5ac89fab212864"} Jan 27 07:54:52 crc kubenswrapper[4787]: I0127 07:54:52.552810 4787 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-ht6h9" podStartSLOduration=2.463139923 podStartE2EDuration="46.552777191s" podCreationTimestamp="2026-01-27 07:54:06 +0000 UTC" firstStartedPulling="2026-01-27 07:54:07.871092545 +0000 UTC m=+153.523448037" lastFinishedPulling="2026-01-27 07:54:51.960729813 +0000 UTC m=+197.613085305" observedRunningTime="2026-01-27 07:54:52.549216621 +0000 UTC m=+198.201572123" watchObservedRunningTime="2026-01-27 07:54:52.552777191 +0000 UTC m=+198.205132683" Jan 27 07:54:52 crc kubenswrapper[4787]: I0127 07:54:52.823337 4787 patch_prober.go:28] interesting pod/machine-config-daemon-q4fh5 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 27 07:54:52 crc kubenswrapper[4787]: I0127 07:54:52.823408 4787 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-q4fh5" podUID="f051e184-acac-47cf-9e04-9df648288715" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 27 07:54:54 crc kubenswrapper[4787]: I0127 07:54:54.167669 4787 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/installer-9-crc"] Jan 27 07:54:54 crc kubenswrapper[4787]: E0127 07:54:54.168623 4787 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dd23d06a-7017-4d11-a361-c556d5465959" containerName="pruner" Jan 27 07:54:54 crc kubenswrapper[4787]: I0127 07:54:54.168645 4787 state_mem.go:107] "Deleted CPUSet assignment" podUID="dd23d06a-7017-4d11-a361-c556d5465959" containerName="pruner" Jan 27 07:54:54 crc kubenswrapper[4787]: I0127 07:54:54.168778 4787 memory_manager.go:354] "RemoveStaleState removing state" podUID="dd23d06a-7017-4d11-a361-c556d5465959" containerName="pruner" Jan 27 07:54:54 crc kubenswrapper[4787]: I0127 07:54:54.169348 4787 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Jan 27 07:54:54 crc kubenswrapper[4787]: I0127 07:54:54.172693 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt" Jan 27 07:54:54 crc kubenswrapper[4787]: I0127 07:54:54.172730 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n" Jan 27 07:54:54 crc kubenswrapper[4787]: I0127 07:54:54.181365 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/installer-9-crc"] Jan 27 07:54:54 crc kubenswrapper[4787]: I0127 07:54:54.288056 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/d78a135f-a7de-4bb7-b2aa-168fb353658f-kube-api-access\") pod \"installer-9-crc\" (UID: \"d78a135f-a7de-4bb7-b2aa-168fb353658f\") " pod="openshift-kube-apiserver/installer-9-crc" Jan 27 07:54:54 crc kubenswrapper[4787]: I0127 07:54:54.288139 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/d78a135f-a7de-4bb7-b2aa-168fb353658f-kubelet-dir\") pod \"installer-9-crc\" (UID: \"d78a135f-a7de-4bb7-b2aa-168fb353658f\") " pod="openshift-kube-apiserver/installer-9-crc" Jan 27 07:54:54 crc kubenswrapper[4787]: I0127 07:54:54.288183 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/d78a135f-a7de-4bb7-b2aa-168fb353658f-var-lock\") pod \"installer-9-crc\" (UID: \"d78a135f-a7de-4bb7-b2aa-168fb353658f\") " pod="openshift-kube-apiserver/installer-9-crc" Jan 27 07:54:54 crc kubenswrapper[4787]: I0127 07:54:54.389279 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/d78a135f-a7de-4bb7-b2aa-168fb353658f-kubelet-dir\") pod \"installer-9-crc\" (UID: \"d78a135f-a7de-4bb7-b2aa-168fb353658f\") " pod="openshift-kube-apiserver/installer-9-crc" Jan 27 07:54:54 crc kubenswrapper[4787]: I0127 07:54:54.389372 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/d78a135f-a7de-4bb7-b2aa-168fb353658f-var-lock\") pod \"installer-9-crc\" (UID: \"d78a135f-a7de-4bb7-b2aa-168fb353658f\") " pod="openshift-kube-apiserver/installer-9-crc" Jan 27 07:54:54 crc kubenswrapper[4787]: I0127 07:54:54.389426 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/d78a135f-a7de-4bb7-b2aa-168fb353658f-kube-api-access\") pod \"installer-9-crc\" (UID: \"d78a135f-a7de-4bb7-b2aa-168fb353658f\") " pod="openshift-kube-apiserver/installer-9-crc" Jan 27 07:54:54 crc kubenswrapper[4787]: I0127 07:54:54.389419 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/d78a135f-a7de-4bb7-b2aa-168fb353658f-kubelet-dir\") pod \"installer-9-crc\" (UID: \"d78a135f-a7de-4bb7-b2aa-168fb353658f\") " pod="openshift-kube-apiserver/installer-9-crc" Jan 27 07:54:54 crc kubenswrapper[4787]: I0127 07:54:54.389509 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/d78a135f-a7de-4bb7-b2aa-168fb353658f-var-lock\") pod \"installer-9-crc\" (UID: 
\"d78a135f-a7de-4bb7-b2aa-168fb353658f\") " pod="openshift-kube-apiserver/installer-9-crc" Jan 27 07:54:54 crc kubenswrapper[4787]: I0127 07:54:54.412706 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/d78a135f-a7de-4bb7-b2aa-168fb353658f-kube-api-access\") pod \"installer-9-crc\" (UID: \"d78a135f-a7de-4bb7-b2aa-168fb353658f\") " pod="openshift-kube-apiserver/installer-9-crc" Jan 27 07:54:54 crc kubenswrapper[4787]: I0127 07:54:54.497377 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Jan 27 07:54:54 crc kubenswrapper[4787]: I0127 07:54:54.767267 4787 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-npc57" Jan 27 07:54:54 crc kubenswrapper[4787]: I0127 07:54:54.767925 4787 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-npc57" Jan 27 07:54:54 crc kubenswrapper[4787]: I0127 07:54:54.842333 4787 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-npc57" Jan 27 07:54:54 crc kubenswrapper[4787]: I0127 07:54:54.942586 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/installer-9-crc"] Jan 27 07:54:55 crc kubenswrapper[4787]: I0127 07:54:55.071197 4787 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-672cg" Jan 27 07:54:55 crc kubenswrapper[4787]: I0127 07:54:55.071687 4787 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-672cg" Jan 27 07:54:55 crc kubenswrapper[4787]: I0127 07:54:55.135211 4787 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-672cg" Jan 27 07:54:55 crc kubenswrapper[4787]: I0127 07:54:55.550332 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"d78a135f-a7de-4bb7-b2aa-168fb353658f","Type":"ContainerStarted","Data":"c5a010ebcd473be981facf7a933739c86185647f5b265aa8a4c5d4030136ffd4"} Jan 27 07:54:55 crc kubenswrapper[4787]: I0127 07:54:55.591854 4787 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-672cg" Jan 27 07:54:55 crc kubenswrapper[4787]: I0127 07:54:55.591932 4787 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-npc57" Jan 27 07:54:56 crc kubenswrapper[4787]: I0127 07:54:56.558251 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"d78a135f-a7de-4bb7-b2aa-168fb353658f","Type":"ContainerStarted","Data":"86b21223e074e0eb982b5516ae2faf73f9fccba44d21367b5f9b9319a1d62acc"} Jan 27 07:54:56 crc kubenswrapper[4787]: I0127 07:54:56.607802 4787 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/installer-9-crc" podStartSLOduration=2.607770998 podStartE2EDuration="2.607770998s" podCreationTimestamp="2026-01-27 07:54:54 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-27 07:54:56.601059534 +0000 UTC m=+202.253415066" watchObservedRunningTime="2026-01-27 07:54:56.607770998 +0000 UTC m=+202.260126530" Jan 27 
07:54:56 crc kubenswrapper[4787]: I0127 07:54:56.845094 4787 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-ht6h9" Jan 27 07:54:56 crc kubenswrapper[4787]: I0127 07:54:56.845175 4787 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-ht6h9" Jan 27 07:54:56 crc kubenswrapper[4787]: I0127 07:54:56.889620 4787 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-ht6h9" Jan 27 07:54:57 crc kubenswrapper[4787]: I0127 07:54:57.333258 4787 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-hz9l2" Jan 27 07:54:57 crc kubenswrapper[4787]: I0127 07:54:57.382334 4787 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-hz9l2" Jan 27 07:54:57 crc kubenswrapper[4787]: I0127 07:54:57.615175 4787 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-ht6h9" Jan 27 07:54:57 crc kubenswrapper[4787]: I0127 07:54:57.969664 4787 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-672cg"] Jan 27 07:54:58 crc kubenswrapper[4787]: I0127 07:54:58.286436 4787 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-qjd6t" Jan 27 07:54:58 crc kubenswrapper[4787]: I0127 07:54:58.343999 4787 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-qjd6t" Jan 27 07:54:58 crc kubenswrapper[4787]: I0127 07:54:58.570350 4787 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-672cg" podUID="2a1ecf2a-6b91-48d7-873e-918dd4e045fc" containerName="registry-server" containerID="cri-o://be7ffa9db32f83f23699c5d5677bd9eb9428eb33ca38c00529df07ae62a31547" gracePeriod=2 Jan 27 07:54:59 crc kubenswrapper[4787]: I0127 07:54:59.368257 4787 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-hz9l2"] Jan 27 07:54:59 crc kubenswrapper[4787]: I0127 07:54:59.368582 4787 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-hz9l2" podUID="264daa3b-f5d9-407d-8835-35845525329a" containerName="registry-server" containerID="cri-o://92972139bf2205651890b39b93a8107ed20a0a61ec1230b6775bffc306789804" gracePeriod=2 Jan 27 07:54:59 crc kubenswrapper[4787]: I0127 07:54:59.591634 4787 generic.go:334] "Generic (PLEG): container finished" podID="2a1ecf2a-6b91-48d7-873e-918dd4e045fc" containerID="be7ffa9db32f83f23699c5d5677bd9eb9428eb33ca38c00529df07ae62a31547" exitCode=0 Jan 27 07:54:59 crc kubenswrapper[4787]: I0127 07:54:59.591691 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-672cg" event={"ID":"2a1ecf2a-6b91-48d7-873e-918dd4e045fc","Type":"ContainerDied","Data":"be7ffa9db32f83f23699c5d5677bd9eb9428eb33ca38c00529df07ae62a31547"} Jan 27 07:54:59 crc kubenswrapper[4787]: I0127 07:54:59.594179 4787 generic.go:334] "Generic (PLEG): container finished" podID="0368ea79-94a8-42e3-8986-dddbec83d755" containerID="4c62d29717da4e2dafd29f1d1052ba2a291d903a0149abf52208f5ae61b47bc9" exitCode=0 Jan 27 07:54:59 crc kubenswrapper[4787]: I0127 07:54:59.594262 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/community-operators-cgwgp" event={"ID":"0368ea79-94a8-42e3-8986-dddbec83d755","Type":"ContainerDied","Data":"4c62d29717da4e2dafd29f1d1052ba2a291d903a0149abf52208f5ae61b47bc9"} Jan 27 07:54:59 crc kubenswrapper[4787]: I0127 07:54:59.600908 4787 generic.go:334] "Generic (PLEG): container finished" podID="44cc5af2-0636-4589-a988-e7e32bfea075" containerID="0cc6e3133b3811c2501b6fd4d34b20a999454792da7dd131df5d09583da6565d" exitCode=0 Jan 27 07:54:59 crc kubenswrapper[4787]: I0127 07:54:59.600952 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-tnxzl" event={"ID":"44cc5af2-0636-4589-a988-e7e32bfea075","Type":"ContainerDied","Data":"0cc6e3133b3811c2501b6fd4d34b20a999454792da7dd131df5d09583da6565d"} Jan 27 07:54:59 crc kubenswrapper[4787]: I0127 07:54:59.865346 4787 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-672cg" Jan 27 07:54:59 crc kubenswrapper[4787]: I0127 07:54:59.990579 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2a1ecf2a-6b91-48d7-873e-918dd4e045fc-catalog-content\") pod \"2a1ecf2a-6b91-48d7-873e-918dd4e045fc\" (UID: \"2a1ecf2a-6b91-48d7-873e-918dd4e045fc\") " Jan 27 07:54:59 crc kubenswrapper[4787]: I0127 07:54:59.990675 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2a1ecf2a-6b91-48d7-873e-918dd4e045fc-utilities\") pod \"2a1ecf2a-6b91-48d7-873e-918dd4e045fc\" (UID: \"2a1ecf2a-6b91-48d7-873e-918dd4e045fc\") " Jan 27 07:54:59 crc kubenswrapper[4787]: I0127 07:54:59.990802 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bqrrx\" (UniqueName: \"kubernetes.io/projected/2a1ecf2a-6b91-48d7-873e-918dd4e045fc-kube-api-access-bqrrx\") pod \"2a1ecf2a-6b91-48d7-873e-918dd4e045fc\" (UID: \"2a1ecf2a-6b91-48d7-873e-918dd4e045fc\") " Jan 27 07:54:59 crc kubenswrapper[4787]: I0127 07:54:59.992948 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2a1ecf2a-6b91-48d7-873e-918dd4e045fc-utilities" (OuterVolumeSpecName: "utilities") pod "2a1ecf2a-6b91-48d7-873e-918dd4e045fc" (UID: "2a1ecf2a-6b91-48d7-873e-918dd4e045fc"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 27 07:55:00 crc kubenswrapper[4787]: I0127 07:55:00.001338 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2a1ecf2a-6b91-48d7-873e-918dd4e045fc-kube-api-access-bqrrx" (OuterVolumeSpecName: "kube-api-access-bqrrx") pod "2a1ecf2a-6b91-48d7-873e-918dd4e045fc" (UID: "2a1ecf2a-6b91-48d7-873e-918dd4e045fc"). InnerVolumeSpecName "kube-api-access-bqrrx". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 27 07:55:00 crc kubenswrapper[4787]: I0127 07:55:00.046295 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2a1ecf2a-6b91-48d7-873e-918dd4e045fc-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "2a1ecf2a-6b91-48d7-873e-918dd4e045fc" (UID: "2a1ecf2a-6b91-48d7-873e-918dd4e045fc"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 27 07:55:00 crc kubenswrapper[4787]: I0127 07:55:00.092326 4787 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bqrrx\" (UniqueName: \"kubernetes.io/projected/2a1ecf2a-6b91-48d7-873e-918dd4e045fc-kube-api-access-bqrrx\") on node \"crc\" DevicePath \"\"" Jan 27 07:55:00 crc kubenswrapper[4787]: I0127 07:55:00.092364 4787 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2a1ecf2a-6b91-48d7-873e-918dd4e045fc-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 27 07:55:00 crc kubenswrapper[4787]: I0127 07:55:00.092374 4787 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2a1ecf2a-6b91-48d7-873e-918dd4e045fc-utilities\") on node \"crc\" DevicePath \"\"" Jan 27 07:55:00 crc kubenswrapper[4787]: I0127 07:55:00.610315 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-672cg" event={"ID":"2a1ecf2a-6b91-48d7-873e-918dd4e045fc","Type":"ContainerDied","Data":"b8aaeac82942157bbd1bdc8b6aa3df78bb8abe8a0a411a20aacb7836a96b1339"} Jan 27 07:55:00 crc kubenswrapper[4787]: I0127 07:55:00.610749 4787 scope.go:117] "RemoveContainer" containerID="be7ffa9db32f83f23699c5d5677bd9eb9428eb33ca38c00529df07ae62a31547" Jan 27 07:55:00 crc kubenswrapper[4787]: I0127 07:55:00.610690 4787 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-672cg" Jan 27 07:55:00 crc kubenswrapper[4787]: I0127 07:55:00.616449 4787 generic.go:334] "Generic (PLEG): container finished" podID="264daa3b-f5d9-407d-8835-35845525329a" containerID="92972139bf2205651890b39b93a8107ed20a0a61ec1230b6775bffc306789804" exitCode=0 Jan 27 07:55:00 crc kubenswrapper[4787]: I0127 07:55:00.616499 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-hz9l2" event={"ID":"264daa3b-f5d9-407d-8835-35845525329a","Type":"ContainerDied","Data":"92972139bf2205651890b39b93a8107ed20a0a61ec1230b6775bffc306789804"} Jan 27 07:55:00 crc kubenswrapper[4787]: I0127 07:55:00.648067 4787 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-672cg"] Jan 27 07:55:00 crc kubenswrapper[4787]: I0127 07:55:00.651412 4787 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-672cg"] Jan 27 07:55:00 crc kubenswrapper[4787]: I0127 07:55:00.658688 4787 scope.go:117] "RemoveContainer" containerID="e44397098e422e87e1da7b248b23d5f52f1cfcc38a65342a95ecf9164e6322e7" Jan 27 07:55:00 crc kubenswrapper[4787]: I0127 07:55:00.681123 4787 scope.go:117] "RemoveContainer" containerID="01d251eebd7c2252e3ae9b7ab01cffa2fc304155551715934787979ebd43ab3e" Jan 27 07:55:01 crc kubenswrapper[4787]: I0127 07:55:01.086840 4787 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2a1ecf2a-6b91-48d7-873e-918dd4e045fc" path="/var/lib/kubelet/pods/2a1ecf2a-6b91-48d7-873e-918dd4e045fc/volumes" Jan 27 07:55:01 crc kubenswrapper[4787]: I0127 07:55:01.512521 4787 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-hz9l2" Jan 27 07:55:01 crc kubenswrapper[4787]: I0127 07:55:01.624356 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-hz9l2" event={"ID":"264daa3b-f5d9-407d-8835-35845525329a","Type":"ContainerDied","Data":"34b029af2088374d8be804c9373b55e19265a67e5ebece55f9fd4a6ba2ab1d08"} Jan 27 07:55:01 crc kubenswrapper[4787]: I0127 07:55:01.624874 4787 scope.go:117] "RemoveContainer" containerID="92972139bf2205651890b39b93a8107ed20a0a61ec1230b6775bffc306789804" Jan 27 07:55:01 crc kubenswrapper[4787]: I0127 07:55:01.624452 4787 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-hz9l2" Jan 27 07:55:01 crc kubenswrapper[4787]: I0127 07:55:01.635080 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/264daa3b-f5d9-407d-8835-35845525329a-catalog-content\") pod \"264daa3b-f5d9-407d-8835-35845525329a\" (UID: \"264daa3b-f5d9-407d-8835-35845525329a\") " Jan 27 07:55:01 crc kubenswrapper[4787]: I0127 07:55:01.635126 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/264daa3b-f5d9-407d-8835-35845525329a-utilities\") pod \"264daa3b-f5d9-407d-8835-35845525329a\" (UID: \"264daa3b-f5d9-407d-8835-35845525329a\") " Jan 27 07:55:01 crc kubenswrapper[4787]: I0127 07:55:01.635159 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-65qxh\" (UniqueName: \"kubernetes.io/projected/264daa3b-f5d9-407d-8835-35845525329a-kube-api-access-65qxh\") pod \"264daa3b-f5d9-407d-8835-35845525329a\" (UID: \"264daa3b-f5d9-407d-8835-35845525329a\") " Jan 27 07:55:01 crc kubenswrapper[4787]: I0127 07:55:01.636640 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/264daa3b-f5d9-407d-8835-35845525329a-utilities" (OuterVolumeSpecName: "utilities") pod "264daa3b-f5d9-407d-8835-35845525329a" (UID: "264daa3b-f5d9-407d-8835-35845525329a"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 27 07:55:01 crc kubenswrapper[4787]: I0127 07:55:01.643722 4787 scope.go:117] "RemoveContainer" containerID="1b10219e013f243bd9fe375bd99faab099e08f41d538f33974e456fa1220d7b7" Jan 27 07:55:01 crc kubenswrapper[4787]: I0127 07:55:01.644181 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/264daa3b-f5d9-407d-8835-35845525329a-kube-api-access-65qxh" (OuterVolumeSpecName: "kube-api-access-65qxh") pod "264daa3b-f5d9-407d-8835-35845525329a" (UID: "264daa3b-f5d9-407d-8835-35845525329a"). InnerVolumeSpecName "kube-api-access-65qxh". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 27 07:55:01 crc kubenswrapper[4787]: I0127 07:55:01.663202 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/264daa3b-f5d9-407d-8835-35845525329a-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "264daa3b-f5d9-407d-8835-35845525329a" (UID: "264daa3b-f5d9-407d-8835-35845525329a"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 27 07:55:01 crc kubenswrapper[4787]: I0127 07:55:01.670092 4787 scope.go:117] "RemoveContainer" containerID="fc80e896d018112629afc7413a0e19d17cf029077e3a9fbb67d67c7d973bf040" Jan 27 07:55:01 crc kubenswrapper[4787]: I0127 07:55:01.736042 4787 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/264daa3b-f5d9-407d-8835-35845525329a-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 27 07:55:01 crc kubenswrapper[4787]: I0127 07:55:01.736098 4787 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/264daa3b-f5d9-407d-8835-35845525329a-utilities\") on node \"crc\" DevicePath \"\"" Jan 27 07:55:01 crc kubenswrapper[4787]: I0127 07:55:01.736115 4787 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-65qxh\" (UniqueName: \"kubernetes.io/projected/264daa3b-f5d9-407d-8835-35845525329a-kube-api-access-65qxh\") on node \"crc\" DevicePath \"\"" Jan 27 07:55:01 crc kubenswrapper[4787]: I0127 07:55:01.775222 4787 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-qjd6t"] Jan 27 07:55:01 crc kubenswrapper[4787]: I0127 07:55:01.775731 4787 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-qjd6t" podUID="5f5e46e1-1252-4a0a-ad11-8d4236f3759b" containerName="registry-server" containerID="cri-o://273c3736c041c8021a3e5eb647bdda5506773181a199572370f5be6b85004a04" gracePeriod=2 Jan 27 07:55:01 crc kubenswrapper[4787]: I0127 07:55:01.961682 4787 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-hz9l2"] Jan 27 07:55:01 crc kubenswrapper[4787]: I0127 07:55:01.964998 4787 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-hz9l2"] Jan 27 07:55:02 crc kubenswrapper[4787]: I0127 07:55:02.413146 4787 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-7598866b8d-sz455"] Jan 27 07:55:02 crc kubenswrapper[4787]: I0127 07:55:02.413471 4787 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-7598866b8d-sz455" podUID="1a623b08-d78c-4d91-bff0-6535d713eb2c" containerName="controller-manager" containerID="cri-o://ec5d5918617f4cf77b4f25c6da400fa3ea9bb3863c461eac264ef2c1e57344ea" gracePeriod=30 Jan 27 07:55:02 crc kubenswrapper[4787]: I0127 07:55:02.437978 4787 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-774cb7c4cb-j9mkl"] Jan 27 07:55:02 crc kubenswrapper[4787]: I0127 07:55:02.438647 4787 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-774cb7c4cb-j9mkl" podUID="6381e89b-99e4-480e-9ab0-14a99d22b889" containerName="route-controller-manager" containerID="cri-o://ab12b522093957a0418fad02bce7b15c7c854402c69a9e25ea60cea48243dfae" gracePeriod=30 Jan 27 07:55:02 crc kubenswrapper[4787]: I0127 07:55:02.671518 4787 generic.go:334] "Generic (PLEG): container finished" podID="5f5e46e1-1252-4a0a-ad11-8d4236f3759b" containerID="273c3736c041c8021a3e5eb647bdda5506773181a199572370f5be6b85004a04" exitCode=0 Jan 27 07:55:02 crc kubenswrapper[4787]: I0127 07:55:02.672171 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-qjd6t" 
event={"ID":"5f5e46e1-1252-4a0a-ad11-8d4236f3759b","Type":"ContainerDied","Data":"273c3736c041c8021a3e5eb647bdda5506773181a199572370f5be6b85004a04"} Jan 27 07:55:02 crc kubenswrapper[4787]: I0127 07:55:02.675909 4787 generic.go:334] "Generic (PLEG): container finished" podID="1a623b08-d78c-4d91-bff0-6535d713eb2c" containerID="ec5d5918617f4cf77b4f25c6da400fa3ea9bb3863c461eac264ef2c1e57344ea" exitCode=0 Jan 27 07:55:02 crc kubenswrapper[4787]: I0127 07:55:02.676020 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-7598866b8d-sz455" event={"ID":"1a623b08-d78c-4d91-bff0-6535d713eb2c","Type":"ContainerDied","Data":"ec5d5918617f4cf77b4f25c6da400fa3ea9bb3863c461eac264ef2c1e57344ea"} Jan 27 07:55:02 crc kubenswrapper[4787]: I0127 07:55:02.680544 4787 generic.go:334] "Generic (PLEG): container finished" podID="6381e89b-99e4-480e-9ab0-14a99d22b889" containerID="ab12b522093957a0418fad02bce7b15c7c854402c69a9e25ea60cea48243dfae" exitCode=0 Jan 27 07:55:02 crc kubenswrapper[4787]: I0127 07:55:02.680612 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-774cb7c4cb-j9mkl" event={"ID":"6381e89b-99e4-480e-9ab0-14a99d22b889","Type":"ContainerDied","Data":"ab12b522093957a0418fad02bce7b15c7c854402c69a9e25ea60cea48243dfae"} Jan 27 07:55:03 crc kubenswrapper[4787]: I0127 07:55:03.083516 4787 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="264daa3b-f5d9-407d-8835-35845525329a" path="/var/lib/kubelet/pods/264daa3b-f5d9-407d-8835-35845525329a/volumes" Jan 27 07:55:03 crc kubenswrapper[4787]: I0127 07:55:03.123016 4787 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-774cb7c4cb-j9mkl" Jan 27 07:55:03 crc kubenswrapper[4787]: I0127 07:55:03.129345 4787 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-qjd6t" Jan 27 07:55:03 crc kubenswrapper[4787]: I0127 07:55:03.133152 4787 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-7598866b8d-sz455" Jan 27 07:55:03 crc kubenswrapper[4787]: I0127 07:55:03.162734 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qplk2\" (UniqueName: \"kubernetes.io/projected/6381e89b-99e4-480e-9ab0-14a99d22b889-kube-api-access-qplk2\") pod \"6381e89b-99e4-480e-9ab0-14a99d22b889\" (UID: \"6381e89b-99e4-480e-9ab0-14a99d22b889\") " Jan 27 07:55:03 crc kubenswrapper[4787]: I0127 07:55:03.162812 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1a623b08-d78c-4d91-bff0-6535d713eb2c-config\") pod \"1a623b08-d78c-4d91-bff0-6535d713eb2c\" (UID: \"1a623b08-d78c-4d91-bff0-6535d713eb2c\") " Jan 27 07:55:03 crc kubenswrapper[4787]: I0127 07:55:03.162879 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/6381e89b-99e4-480e-9ab0-14a99d22b889-client-ca\") pod \"6381e89b-99e4-480e-9ab0-14a99d22b889\" (UID: \"6381e89b-99e4-480e-9ab0-14a99d22b889\") " Jan 27 07:55:03 crc kubenswrapper[4787]: I0127 07:55:03.162991 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6381e89b-99e4-480e-9ab0-14a99d22b889-config\") pod \"6381e89b-99e4-480e-9ab0-14a99d22b889\" (UID: \"6381e89b-99e4-480e-9ab0-14a99d22b889\") " Jan 27 07:55:03 crc kubenswrapper[4787]: I0127 07:55:03.163034 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5f5e46e1-1252-4a0a-ad11-8d4236f3759b-catalog-content\") pod \"5f5e46e1-1252-4a0a-ad11-8d4236f3759b\" (UID: \"5f5e46e1-1252-4a0a-ad11-8d4236f3759b\") " Jan 27 07:55:03 crc kubenswrapper[4787]: I0127 07:55:03.163058 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/1a623b08-d78c-4d91-bff0-6535d713eb2c-proxy-ca-bundles\") pod \"1a623b08-d78c-4d91-bff0-6535d713eb2c\" (UID: \"1a623b08-d78c-4d91-bff0-6535d713eb2c\") " Jan 27 07:55:03 crc kubenswrapper[4787]: I0127 07:55:03.163652 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6381e89b-99e4-480e-9ab0-14a99d22b889-client-ca" (OuterVolumeSpecName: "client-ca") pod "6381e89b-99e4-480e-9ab0-14a99d22b889" (UID: "6381e89b-99e4-480e-9ab0-14a99d22b889"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 27 07:55:03 crc kubenswrapper[4787]: I0127 07:55:03.163874 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6381e89b-99e4-480e-9ab0-14a99d22b889-config" (OuterVolumeSpecName: "config") pod "6381e89b-99e4-480e-9ab0-14a99d22b889" (UID: "6381e89b-99e4-480e-9ab0-14a99d22b889"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 27 07:55:03 crc kubenswrapper[4787]: I0127 07:55:03.164333 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1a623b08-d78c-4d91-bff0-6535d713eb2c-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "1a623b08-d78c-4d91-bff0-6535d713eb2c" (UID: "1a623b08-d78c-4d91-bff0-6535d713eb2c"). InnerVolumeSpecName "proxy-ca-bundles". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 27 07:55:03 crc kubenswrapper[4787]: I0127 07:55:03.164407 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1a623b08-d78c-4d91-bff0-6535d713eb2c-config" (OuterVolumeSpecName: "config") pod "1a623b08-d78c-4d91-bff0-6535d713eb2c" (UID: "1a623b08-d78c-4d91-bff0-6535d713eb2c"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 27 07:55:03 crc kubenswrapper[4787]: I0127 07:55:03.164796 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8dfzs\" (UniqueName: \"kubernetes.io/projected/5f5e46e1-1252-4a0a-ad11-8d4236f3759b-kube-api-access-8dfzs\") pod \"5f5e46e1-1252-4a0a-ad11-8d4236f3759b\" (UID: \"5f5e46e1-1252-4a0a-ad11-8d4236f3759b\") " Jan 27 07:55:03 crc kubenswrapper[4787]: I0127 07:55:03.164836 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/1a623b08-d78c-4d91-bff0-6535d713eb2c-client-ca\") pod \"1a623b08-d78c-4d91-bff0-6535d713eb2c\" (UID: \"1a623b08-d78c-4d91-bff0-6535d713eb2c\") " Jan 27 07:55:03 crc kubenswrapper[4787]: I0127 07:55:03.164897 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6381e89b-99e4-480e-9ab0-14a99d22b889-serving-cert\") pod \"6381e89b-99e4-480e-9ab0-14a99d22b889\" (UID: \"6381e89b-99e4-480e-9ab0-14a99d22b889\") " Jan 27 07:55:03 crc kubenswrapper[4787]: I0127 07:55:03.164965 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5f5e46e1-1252-4a0a-ad11-8d4236f3759b-utilities\") pod \"5f5e46e1-1252-4a0a-ad11-8d4236f3759b\" (UID: \"5f5e46e1-1252-4a0a-ad11-8d4236f3759b\") " Jan 27 07:55:03 crc kubenswrapper[4787]: I0127 07:55:03.164990 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-z6dnj\" (UniqueName: \"kubernetes.io/projected/1a623b08-d78c-4d91-bff0-6535d713eb2c-kube-api-access-z6dnj\") pod \"1a623b08-d78c-4d91-bff0-6535d713eb2c\" (UID: \"1a623b08-d78c-4d91-bff0-6535d713eb2c\") " Jan 27 07:55:03 crc kubenswrapper[4787]: I0127 07:55:03.165049 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1a623b08-d78c-4d91-bff0-6535d713eb2c-serving-cert\") pod \"1a623b08-d78c-4d91-bff0-6535d713eb2c\" (UID: \"1a623b08-d78c-4d91-bff0-6535d713eb2c\") " Jan 27 07:55:03 crc kubenswrapper[4787]: I0127 07:55:03.165407 4787 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/6381e89b-99e4-480e-9ab0-14a99d22b889-client-ca\") on node \"crc\" DevicePath \"\"" Jan 27 07:55:03 crc kubenswrapper[4787]: I0127 07:55:03.165444 4787 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6381e89b-99e4-480e-9ab0-14a99d22b889-config\") on node \"crc\" DevicePath \"\"" Jan 27 07:55:03 crc kubenswrapper[4787]: I0127 07:55:03.167027 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5f5e46e1-1252-4a0a-ad11-8d4236f3759b-utilities" (OuterVolumeSpecName: "utilities") pod "5f5e46e1-1252-4a0a-ad11-8d4236f3759b" (UID: "5f5e46e1-1252-4a0a-ad11-8d4236f3759b"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 27 07:55:03 crc kubenswrapper[4787]: I0127 07:55:03.167495 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1a623b08-d78c-4d91-bff0-6535d713eb2c-client-ca" (OuterVolumeSpecName: "client-ca") pod "1a623b08-d78c-4d91-bff0-6535d713eb2c" (UID: "1a623b08-d78c-4d91-bff0-6535d713eb2c"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 27 07:55:03 crc kubenswrapper[4787]: I0127 07:55:03.198506 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1a623b08-d78c-4d91-bff0-6535d713eb2c-kube-api-access-z6dnj" (OuterVolumeSpecName: "kube-api-access-z6dnj") pod "1a623b08-d78c-4d91-bff0-6535d713eb2c" (UID: "1a623b08-d78c-4d91-bff0-6535d713eb2c"). InnerVolumeSpecName "kube-api-access-z6dnj". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 27 07:55:03 crc kubenswrapper[4787]: I0127 07:55:03.198529 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1a623b08-d78c-4d91-bff0-6535d713eb2c-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1a623b08-d78c-4d91-bff0-6535d713eb2c" (UID: "1a623b08-d78c-4d91-bff0-6535d713eb2c"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 27 07:55:03 crc kubenswrapper[4787]: I0127 07:55:03.198604 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6381e89b-99e4-480e-9ab0-14a99d22b889-kube-api-access-qplk2" (OuterVolumeSpecName: "kube-api-access-qplk2") pod "6381e89b-99e4-480e-9ab0-14a99d22b889" (UID: "6381e89b-99e4-480e-9ab0-14a99d22b889"). InnerVolumeSpecName "kube-api-access-qplk2". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 27 07:55:03 crc kubenswrapper[4787]: I0127 07:55:03.198610 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5f5e46e1-1252-4a0a-ad11-8d4236f3759b-kube-api-access-8dfzs" (OuterVolumeSpecName: "kube-api-access-8dfzs") pod "5f5e46e1-1252-4a0a-ad11-8d4236f3759b" (UID: "5f5e46e1-1252-4a0a-ad11-8d4236f3759b"). InnerVolumeSpecName "kube-api-access-8dfzs". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 27 07:55:03 crc kubenswrapper[4787]: I0127 07:55:03.198515 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6381e89b-99e4-480e-9ab0-14a99d22b889-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "6381e89b-99e4-480e-9ab0-14a99d22b889" (UID: "6381e89b-99e4-480e-9ab0-14a99d22b889"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 27 07:55:03 crc kubenswrapper[4787]: I0127 07:55:03.266919 4787 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-z6dnj\" (UniqueName: \"kubernetes.io/projected/1a623b08-d78c-4d91-bff0-6535d713eb2c-kube-api-access-z6dnj\") on node \"crc\" DevicePath \"\"" Jan 27 07:55:03 crc kubenswrapper[4787]: I0127 07:55:03.266967 4787 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1a623b08-d78c-4d91-bff0-6535d713eb2c-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 27 07:55:03 crc kubenswrapper[4787]: I0127 07:55:03.266980 4787 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qplk2\" (UniqueName: \"kubernetes.io/projected/6381e89b-99e4-480e-9ab0-14a99d22b889-kube-api-access-qplk2\") on node \"crc\" DevicePath \"\"" Jan 27 07:55:03 crc kubenswrapper[4787]: I0127 07:55:03.266992 4787 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1a623b08-d78c-4d91-bff0-6535d713eb2c-config\") on node \"crc\" DevicePath \"\"" Jan 27 07:55:03 crc kubenswrapper[4787]: I0127 07:55:03.267001 4787 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/1a623b08-d78c-4d91-bff0-6535d713eb2c-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Jan 27 07:55:03 crc kubenswrapper[4787]: I0127 07:55:03.267010 4787 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8dfzs\" (UniqueName: \"kubernetes.io/projected/5f5e46e1-1252-4a0a-ad11-8d4236f3759b-kube-api-access-8dfzs\") on node \"crc\" DevicePath \"\"" Jan 27 07:55:03 crc kubenswrapper[4787]: I0127 07:55:03.267024 4787 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/1a623b08-d78c-4d91-bff0-6535d713eb2c-client-ca\") on node \"crc\" DevicePath \"\"" Jan 27 07:55:03 crc kubenswrapper[4787]: I0127 07:55:03.267038 4787 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6381e89b-99e4-480e-9ab0-14a99d22b889-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 27 07:55:03 crc kubenswrapper[4787]: I0127 07:55:03.267050 4787 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5f5e46e1-1252-4a0a-ad11-8d4236f3759b-utilities\") on node \"crc\" DevicePath \"\"" Jan 27 07:55:03 crc kubenswrapper[4787]: I0127 07:55:03.322763 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5f5e46e1-1252-4a0a-ad11-8d4236f3759b-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5f5e46e1-1252-4a0a-ad11-8d4236f3759b" (UID: "5f5e46e1-1252-4a0a-ad11-8d4236f3759b"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 27 07:55:03 crc kubenswrapper[4787]: I0127 07:55:03.368351 4787 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5f5e46e1-1252-4a0a-ad11-8d4236f3759b-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 27 07:55:03 crc kubenswrapper[4787]: I0127 07:55:03.697972 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-cgwgp" event={"ID":"0368ea79-94a8-42e3-8986-dddbec83d755","Type":"ContainerStarted","Data":"40ccd79f42857c9cc5f59993f886e0ed52b8d538b0a585d795f0fbaf3b3c58f2"} Jan 27 07:55:03 crc kubenswrapper[4787]: I0127 07:55:03.700210 4787 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-774cb7c4cb-j9mkl" Jan 27 07:55:03 crc kubenswrapper[4787]: I0127 07:55:03.700390 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-774cb7c4cb-j9mkl" event={"ID":"6381e89b-99e4-480e-9ab0-14a99d22b889","Type":"ContainerDied","Data":"d01da66c31038ba9bcead586f78d183db79fbd103a10f03106ce884e28edd5a4"} Jan 27 07:55:03 crc kubenswrapper[4787]: I0127 07:55:03.700794 4787 scope.go:117] "RemoveContainer" containerID="ab12b522093957a0418fad02bce7b15c7c854402c69a9e25ea60cea48243dfae" Jan 27 07:55:03 crc kubenswrapper[4787]: I0127 07:55:03.706357 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-tnxzl" event={"ID":"44cc5af2-0636-4589-a988-e7e32bfea075","Type":"ContainerStarted","Data":"e3cf97ca374aafda820a97034b2b6a3f6e20f34e82f747d6f2cfb26dfc59a223"} Jan 27 07:55:03 crc kubenswrapper[4787]: I0127 07:55:03.718089 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-qjd6t" event={"ID":"5f5e46e1-1252-4a0a-ad11-8d4236f3759b","Type":"ContainerDied","Data":"a7fc73f366dad2ee2177e45cec1c07c9411e1ae51b5099acd00178407f132bd0"} Jan 27 07:55:03 crc kubenswrapper[4787]: I0127 07:55:03.718162 4787 scope.go:117] "RemoveContainer" containerID="273c3736c041c8021a3e5eb647bdda5506773181a199572370f5be6b85004a04" Jan 27 07:55:03 crc kubenswrapper[4787]: I0127 07:55:03.718283 4787 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-qjd6t" Jan 27 07:55:03 crc kubenswrapper[4787]: I0127 07:55:03.726643 4787 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-cgwgp" podStartSLOduration=4.059454039 podStartE2EDuration="59.726612456s" podCreationTimestamp="2026-01-27 07:54:04 +0000 UTC" firstStartedPulling="2026-01-27 07:54:06.833457422 +0000 UTC m=+152.485812924" lastFinishedPulling="2026-01-27 07:55:02.500615849 +0000 UTC m=+208.152971341" observedRunningTime="2026-01-27 07:55:03.718754834 +0000 UTC m=+209.371110346" watchObservedRunningTime="2026-01-27 07:55:03.726612456 +0000 UTC m=+209.378967948" Jan 27 07:55:03 crc kubenswrapper[4787]: I0127 07:55:03.728536 4787 generic.go:334] "Generic (PLEG): container finished" podID="1e42281e-20ed-4105-866f-878ffbf6c6eb" containerID="aba819bb87de56fd8e57aa84a316cf4c3386292e3c4ce220f195f806f8261dfd" exitCode=0 Jan 27 07:55:03 crc kubenswrapper[4787]: I0127 07:55:03.728682 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-kkjnl" event={"ID":"1e42281e-20ed-4105-866f-878ffbf6c6eb","Type":"ContainerDied","Data":"aba819bb87de56fd8e57aa84a316cf4c3386292e3c4ce220f195f806f8261dfd"} Jan 27 07:55:03 crc kubenswrapper[4787]: I0127 07:55:03.752160 4787 scope.go:117] "RemoveContainer" containerID="1309a0eb542609f82174c456dbe308ca94a8f5093a7b41ff79e5822733bbd396" Jan 27 07:55:03 crc kubenswrapper[4787]: I0127 07:55:03.756319 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-7598866b8d-sz455" event={"ID":"1a623b08-d78c-4d91-bff0-6535d713eb2c","Type":"ContainerDied","Data":"3da2e262993ac94cc9dc72bad86247ba0291e69ae87e9695f743aab86626a5a0"} Jan 27 07:55:03 crc kubenswrapper[4787]: I0127 07:55:03.756441 4787 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-7598866b8d-sz455" Jan 27 07:55:03 crc kubenswrapper[4787]: I0127 07:55:03.772249 4787 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-tnxzl" podStartSLOduration=3.994312203 podStartE2EDuration="59.772210335s" podCreationTimestamp="2026-01-27 07:54:04 +0000 UTC" firstStartedPulling="2026-01-27 07:54:06.839814408 +0000 UTC m=+152.492169900" lastFinishedPulling="2026-01-27 07:55:02.61771253 +0000 UTC m=+208.270068032" observedRunningTime="2026-01-27 07:55:03.752859186 +0000 UTC m=+209.405214698" watchObservedRunningTime="2026-01-27 07:55:03.772210335 +0000 UTC m=+209.424565837" Jan 27 07:55:03 crc kubenswrapper[4787]: I0127 07:55:03.790981 4787 scope.go:117] "RemoveContainer" containerID="e485664fbc4a751f0a85db31317555b712de9d281e0fb607d7c774fccc36329f" Jan 27 07:55:03 crc kubenswrapper[4787]: I0127 07:55:03.813276 4787 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-774cb7c4cb-j9mkl"] Jan 27 07:55:03 crc kubenswrapper[4787]: I0127 07:55:03.815794 4787 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-774cb7c4cb-j9mkl"] Jan 27 07:55:03 crc kubenswrapper[4787]: I0127 07:55:03.825804 4787 scope.go:117] "RemoveContainer" containerID="ec5d5918617f4cf77b4f25c6da400fa3ea9bb3863c461eac264ef2c1e57344ea" Jan 27 07:55:03 crc kubenswrapper[4787]: I0127 07:55:03.837366 4787 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-659f59795d-4hwcr"] Jan 27 07:55:03 crc kubenswrapper[4787]: E0127 07:55:03.837681 4787 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="264daa3b-f5d9-407d-8835-35845525329a" containerName="extract-content" Jan 27 07:55:03 crc kubenswrapper[4787]: I0127 07:55:03.837844 4787 state_mem.go:107] "Deleted CPUSet assignment" podUID="264daa3b-f5d9-407d-8835-35845525329a" containerName="extract-content" Jan 27 07:55:03 crc kubenswrapper[4787]: E0127 07:55:03.837892 4787 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5f5e46e1-1252-4a0a-ad11-8d4236f3759b" containerName="extract-content" Jan 27 07:55:03 crc kubenswrapper[4787]: I0127 07:55:03.837903 4787 state_mem.go:107] "Deleted CPUSet assignment" podUID="5f5e46e1-1252-4a0a-ad11-8d4236f3759b" containerName="extract-content" Jan 27 07:55:03 crc kubenswrapper[4787]: E0127 07:55:03.837919 4787 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2a1ecf2a-6b91-48d7-873e-918dd4e045fc" containerName="extract-utilities" Jan 27 07:55:03 crc kubenswrapper[4787]: I0127 07:55:03.837929 4787 state_mem.go:107] "Deleted CPUSet assignment" podUID="2a1ecf2a-6b91-48d7-873e-918dd4e045fc" containerName="extract-utilities" Jan 27 07:55:03 crc kubenswrapper[4787]: E0127 07:55:03.837939 4787 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2a1ecf2a-6b91-48d7-873e-918dd4e045fc" containerName="extract-content" Jan 27 07:55:03 crc kubenswrapper[4787]: I0127 07:55:03.837947 4787 state_mem.go:107] "Deleted CPUSet assignment" podUID="2a1ecf2a-6b91-48d7-873e-918dd4e045fc" containerName="extract-content" Jan 27 07:55:03 crc kubenswrapper[4787]: E0127 07:55:03.837956 4787 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6381e89b-99e4-480e-9ab0-14a99d22b889" containerName="route-controller-manager" Jan 27 07:55:03 crc kubenswrapper[4787]: I0127 
07:55:03.837965 4787 state_mem.go:107] "Deleted CPUSet assignment" podUID="6381e89b-99e4-480e-9ab0-14a99d22b889" containerName="route-controller-manager" Jan 27 07:55:03 crc kubenswrapper[4787]: E0127 07:55:03.837979 4787 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="264daa3b-f5d9-407d-8835-35845525329a" containerName="registry-server" Jan 27 07:55:03 crc kubenswrapper[4787]: I0127 07:55:03.837989 4787 state_mem.go:107] "Deleted CPUSet assignment" podUID="264daa3b-f5d9-407d-8835-35845525329a" containerName="registry-server" Jan 27 07:55:03 crc kubenswrapper[4787]: E0127 07:55:03.838003 4787 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5f5e46e1-1252-4a0a-ad11-8d4236f3759b" containerName="registry-server" Jan 27 07:55:03 crc kubenswrapper[4787]: I0127 07:55:03.838011 4787 state_mem.go:107] "Deleted CPUSet assignment" podUID="5f5e46e1-1252-4a0a-ad11-8d4236f3759b" containerName="registry-server" Jan 27 07:55:03 crc kubenswrapper[4787]: E0127 07:55:03.838021 4787 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5f5e46e1-1252-4a0a-ad11-8d4236f3759b" containerName="extract-utilities" Jan 27 07:55:03 crc kubenswrapper[4787]: I0127 07:55:03.838030 4787 state_mem.go:107] "Deleted CPUSet assignment" podUID="5f5e46e1-1252-4a0a-ad11-8d4236f3759b" containerName="extract-utilities" Jan 27 07:55:03 crc kubenswrapper[4787]: E0127 07:55:03.838040 4787 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2a1ecf2a-6b91-48d7-873e-918dd4e045fc" containerName="registry-server" Jan 27 07:55:03 crc kubenswrapper[4787]: I0127 07:55:03.838174 4787 state_mem.go:107] "Deleted CPUSet assignment" podUID="2a1ecf2a-6b91-48d7-873e-918dd4e045fc" containerName="registry-server" Jan 27 07:55:03 crc kubenswrapper[4787]: E0127 07:55:03.838233 4787 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="264daa3b-f5d9-407d-8835-35845525329a" containerName="extract-utilities" Jan 27 07:55:03 crc kubenswrapper[4787]: I0127 07:55:03.838244 4787 state_mem.go:107] "Deleted CPUSet assignment" podUID="264daa3b-f5d9-407d-8835-35845525329a" containerName="extract-utilities" Jan 27 07:55:03 crc kubenswrapper[4787]: E0127 07:55:03.838254 4787 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1a623b08-d78c-4d91-bff0-6535d713eb2c" containerName="controller-manager" Jan 27 07:55:03 crc kubenswrapper[4787]: I0127 07:55:03.838262 4787 state_mem.go:107] "Deleted CPUSet assignment" podUID="1a623b08-d78c-4d91-bff0-6535d713eb2c" containerName="controller-manager" Jan 27 07:55:03 crc kubenswrapper[4787]: I0127 07:55:03.838387 4787 memory_manager.go:354] "RemoveStaleState removing state" podUID="1a623b08-d78c-4d91-bff0-6535d713eb2c" containerName="controller-manager" Jan 27 07:55:03 crc kubenswrapper[4787]: I0127 07:55:03.838405 4787 memory_manager.go:354] "RemoveStaleState removing state" podUID="6381e89b-99e4-480e-9ab0-14a99d22b889" containerName="route-controller-manager" Jan 27 07:55:03 crc kubenswrapper[4787]: I0127 07:55:03.838414 4787 memory_manager.go:354] "RemoveStaleState removing state" podUID="5f5e46e1-1252-4a0a-ad11-8d4236f3759b" containerName="registry-server" Jan 27 07:55:03 crc kubenswrapper[4787]: I0127 07:55:03.838422 4787 memory_manager.go:354] "RemoveStaleState removing state" podUID="2a1ecf2a-6b91-48d7-873e-918dd4e045fc" containerName="registry-server" Jan 27 07:55:03 crc kubenswrapper[4787]: I0127 07:55:03.838430 4787 memory_manager.go:354] "RemoveStaleState removing state" podUID="264daa3b-f5d9-407d-8835-35845525329a" 
containerName="registry-server" Jan 27 07:55:03 crc kubenswrapper[4787]: I0127 07:55:03.838897 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-659f59795d-4hwcr" Jan 27 07:55:03 crc kubenswrapper[4787]: I0127 07:55:03.844086 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Jan 27 07:55:03 crc kubenswrapper[4787]: I0127 07:55:03.844335 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Jan 27 07:55:03 crc kubenswrapper[4787]: I0127 07:55:03.844462 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Jan 27 07:55:03 crc kubenswrapper[4787]: I0127 07:55:03.847060 4787 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-777857c966-2h66n"] Jan 27 07:55:03 crc kubenswrapper[4787]: I0127 07:55:03.848173 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Jan 27 07:55:03 crc kubenswrapper[4787]: I0127 07:55:03.848392 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Jan 27 07:55:03 crc kubenswrapper[4787]: I0127 07:55:03.848521 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Jan 27 07:55:03 crc kubenswrapper[4787]: I0127 07:55:03.852156 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-777857c966-2h66n" Jan 27 07:55:03 crc kubenswrapper[4787]: I0127 07:55:03.853504 4787 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-7598866b8d-sz455"] Jan 27 07:55:03 crc kubenswrapper[4787]: I0127 07:55:03.854089 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Jan 27 07:55:03 crc kubenswrapper[4787]: I0127 07:55:03.855806 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Jan 27 07:55:03 crc kubenswrapper[4787]: I0127 07:55:03.856291 4787 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-7598866b8d-sz455"] Jan 27 07:55:03 crc kubenswrapper[4787]: I0127 07:55:03.858902 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Jan 27 07:55:03 crc kubenswrapper[4787]: I0127 07:55:03.859140 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-659f59795d-4hwcr"] Jan 27 07:55:03 crc kubenswrapper[4787]: I0127 07:55:03.862094 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Jan 27 07:55:03 crc kubenswrapper[4787]: I0127 07:55:03.864417 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-777857c966-2h66n"] Jan 27 07:55:03 crc kubenswrapper[4787]: I0127 07:55:03.864831 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Jan 27 07:55:03 crc kubenswrapper[4787]: I0127 
07:55:03.865311 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Jan 27 07:55:03 crc kubenswrapper[4787]: I0127 07:55:03.866603 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Jan 27 07:55:03 crc kubenswrapper[4787]: I0127 07:55:03.870490 4787 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-qjd6t"] Jan 27 07:55:03 crc kubenswrapper[4787]: I0127 07:55:03.876972 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/816883bf-afa6-47b8-b823-7603ebf5cc71-proxy-ca-bundles\") pod \"controller-manager-777857c966-2h66n\" (UID: \"816883bf-afa6-47b8-b823-7603ebf5cc71\") " pod="openshift-controller-manager/controller-manager-777857c966-2h66n" Jan 27 07:55:03 crc kubenswrapper[4787]: I0127 07:55:03.877023 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s7bl5\" (UniqueName: \"kubernetes.io/projected/816883bf-afa6-47b8-b823-7603ebf5cc71-kube-api-access-s7bl5\") pod \"controller-manager-777857c966-2h66n\" (UID: \"816883bf-afa6-47b8-b823-7603ebf5cc71\") " pod="openshift-controller-manager/controller-manager-777857c966-2h66n" Jan 27 07:55:03 crc kubenswrapper[4787]: I0127 07:55:03.877049 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/816883bf-afa6-47b8-b823-7603ebf5cc71-serving-cert\") pod \"controller-manager-777857c966-2h66n\" (UID: \"816883bf-afa6-47b8-b823-7603ebf5cc71\") " pod="openshift-controller-manager/controller-manager-777857c966-2h66n" Jan 27 07:55:03 crc kubenswrapper[4787]: I0127 07:55:03.877073 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/17fe0585-ad09-4585-9b4c-01698e2c295b-client-ca\") pod \"route-controller-manager-659f59795d-4hwcr\" (UID: \"17fe0585-ad09-4585-9b4c-01698e2c295b\") " pod="openshift-route-controller-manager/route-controller-manager-659f59795d-4hwcr" Jan 27 07:55:03 crc kubenswrapper[4787]: I0127 07:55:03.877110 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/816883bf-afa6-47b8-b823-7603ebf5cc71-client-ca\") pod \"controller-manager-777857c966-2h66n\" (UID: \"816883bf-afa6-47b8-b823-7603ebf5cc71\") " pod="openshift-controller-manager/controller-manager-777857c966-2h66n" Jan 27 07:55:03 crc kubenswrapper[4787]: I0127 07:55:03.877729 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/816883bf-afa6-47b8-b823-7603ebf5cc71-config\") pod \"controller-manager-777857c966-2h66n\" (UID: \"816883bf-afa6-47b8-b823-7603ebf5cc71\") " pod="openshift-controller-manager/controller-manager-777857c966-2h66n" Jan 27 07:55:03 crc kubenswrapper[4787]: I0127 07:55:03.877758 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/17fe0585-ad09-4585-9b4c-01698e2c295b-serving-cert\") pod \"route-controller-manager-659f59795d-4hwcr\" (UID: \"17fe0585-ad09-4585-9b4c-01698e2c295b\") " 
pod="openshift-route-controller-manager/route-controller-manager-659f59795d-4hwcr" Jan 27 07:55:03 crc kubenswrapper[4787]: I0127 07:55:03.878035 4787 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-qjd6t"] Jan 27 07:55:03 crc kubenswrapper[4787]: I0127 07:55:03.879670 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9b5xw\" (UniqueName: \"kubernetes.io/projected/17fe0585-ad09-4585-9b4c-01698e2c295b-kube-api-access-9b5xw\") pod \"route-controller-manager-659f59795d-4hwcr\" (UID: \"17fe0585-ad09-4585-9b4c-01698e2c295b\") " pod="openshift-route-controller-manager/route-controller-manager-659f59795d-4hwcr" Jan 27 07:55:03 crc kubenswrapper[4787]: I0127 07:55:03.879753 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/17fe0585-ad09-4585-9b4c-01698e2c295b-config\") pod \"route-controller-manager-659f59795d-4hwcr\" (UID: \"17fe0585-ad09-4585-9b4c-01698e2c295b\") " pod="openshift-route-controller-manager/route-controller-manager-659f59795d-4hwcr" Jan 27 07:55:03 crc kubenswrapper[4787]: I0127 07:55:03.981185 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/816883bf-afa6-47b8-b823-7603ebf5cc71-client-ca\") pod \"controller-manager-777857c966-2h66n\" (UID: \"816883bf-afa6-47b8-b823-7603ebf5cc71\") " pod="openshift-controller-manager/controller-manager-777857c966-2h66n" Jan 27 07:55:03 crc kubenswrapper[4787]: I0127 07:55:03.981250 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/816883bf-afa6-47b8-b823-7603ebf5cc71-config\") pod \"controller-manager-777857c966-2h66n\" (UID: \"816883bf-afa6-47b8-b823-7603ebf5cc71\") " pod="openshift-controller-manager/controller-manager-777857c966-2h66n" Jan 27 07:55:03 crc kubenswrapper[4787]: I0127 07:55:03.981281 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/17fe0585-ad09-4585-9b4c-01698e2c295b-serving-cert\") pod \"route-controller-manager-659f59795d-4hwcr\" (UID: \"17fe0585-ad09-4585-9b4c-01698e2c295b\") " pod="openshift-route-controller-manager/route-controller-manager-659f59795d-4hwcr" Jan 27 07:55:03 crc kubenswrapper[4787]: I0127 07:55:03.981314 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9b5xw\" (UniqueName: \"kubernetes.io/projected/17fe0585-ad09-4585-9b4c-01698e2c295b-kube-api-access-9b5xw\") pod \"route-controller-manager-659f59795d-4hwcr\" (UID: \"17fe0585-ad09-4585-9b4c-01698e2c295b\") " pod="openshift-route-controller-manager/route-controller-manager-659f59795d-4hwcr" Jan 27 07:55:03 crc kubenswrapper[4787]: I0127 07:55:03.981340 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/17fe0585-ad09-4585-9b4c-01698e2c295b-config\") pod \"route-controller-manager-659f59795d-4hwcr\" (UID: \"17fe0585-ad09-4585-9b4c-01698e2c295b\") " pod="openshift-route-controller-manager/route-controller-manager-659f59795d-4hwcr" Jan 27 07:55:03 crc kubenswrapper[4787]: I0127 07:55:03.981375 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: 
\"kubernetes.io/configmap/816883bf-afa6-47b8-b823-7603ebf5cc71-proxy-ca-bundles\") pod \"controller-manager-777857c966-2h66n\" (UID: \"816883bf-afa6-47b8-b823-7603ebf5cc71\") " pod="openshift-controller-manager/controller-manager-777857c966-2h66n" Jan 27 07:55:03 crc kubenswrapper[4787]: I0127 07:55:03.981406 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s7bl5\" (UniqueName: \"kubernetes.io/projected/816883bf-afa6-47b8-b823-7603ebf5cc71-kube-api-access-s7bl5\") pod \"controller-manager-777857c966-2h66n\" (UID: \"816883bf-afa6-47b8-b823-7603ebf5cc71\") " pod="openshift-controller-manager/controller-manager-777857c966-2h66n" Jan 27 07:55:03 crc kubenswrapper[4787]: I0127 07:55:03.981426 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/816883bf-afa6-47b8-b823-7603ebf5cc71-serving-cert\") pod \"controller-manager-777857c966-2h66n\" (UID: \"816883bf-afa6-47b8-b823-7603ebf5cc71\") " pod="openshift-controller-manager/controller-manager-777857c966-2h66n" Jan 27 07:55:03 crc kubenswrapper[4787]: I0127 07:55:03.981451 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/17fe0585-ad09-4585-9b4c-01698e2c295b-client-ca\") pod \"route-controller-manager-659f59795d-4hwcr\" (UID: \"17fe0585-ad09-4585-9b4c-01698e2c295b\") " pod="openshift-route-controller-manager/route-controller-manager-659f59795d-4hwcr" Jan 27 07:55:03 crc kubenswrapper[4787]: I0127 07:55:03.982508 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/816883bf-afa6-47b8-b823-7603ebf5cc71-client-ca\") pod \"controller-manager-777857c966-2h66n\" (UID: \"816883bf-afa6-47b8-b823-7603ebf5cc71\") " pod="openshift-controller-manager/controller-manager-777857c966-2h66n" Jan 27 07:55:03 crc kubenswrapper[4787]: I0127 07:55:03.983415 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/816883bf-afa6-47b8-b823-7603ebf5cc71-proxy-ca-bundles\") pod \"controller-manager-777857c966-2h66n\" (UID: \"816883bf-afa6-47b8-b823-7603ebf5cc71\") " pod="openshift-controller-manager/controller-manager-777857c966-2h66n" Jan 27 07:55:03 crc kubenswrapper[4787]: I0127 07:55:03.983966 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/17fe0585-ad09-4585-9b4c-01698e2c295b-client-ca\") pod \"route-controller-manager-659f59795d-4hwcr\" (UID: \"17fe0585-ad09-4585-9b4c-01698e2c295b\") " pod="openshift-route-controller-manager/route-controller-manager-659f59795d-4hwcr" Jan 27 07:55:03 crc kubenswrapper[4787]: I0127 07:55:03.983959 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/816883bf-afa6-47b8-b823-7603ebf5cc71-config\") pod \"controller-manager-777857c966-2h66n\" (UID: \"816883bf-afa6-47b8-b823-7603ebf5cc71\") " pod="openshift-controller-manager/controller-manager-777857c966-2h66n" Jan 27 07:55:03 crc kubenswrapper[4787]: I0127 07:55:03.984503 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/17fe0585-ad09-4585-9b4c-01698e2c295b-config\") pod \"route-controller-manager-659f59795d-4hwcr\" (UID: \"17fe0585-ad09-4585-9b4c-01698e2c295b\") " 
pod="openshift-route-controller-manager/route-controller-manager-659f59795d-4hwcr" Jan 27 07:55:03 crc kubenswrapper[4787]: I0127 07:55:03.989366 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/816883bf-afa6-47b8-b823-7603ebf5cc71-serving-cert\") pod \"controller-manager-777857c966-2h66n\" (UID: \"816883bf-afa6-47b8-b823-7603ebf5cc71\") " pod="openshift-controller-manager/controller-manager-777857c966-2h66n" Jan 27 07:55:03 crc kubenswrapper[4787]: I0127 07:55:03.989435 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/17fe0585-ad09-4585-9b4c-01698e2c295b-serving-cert\") pod \"route-controller-manager-659f59795d-4hwcr\" (UID: \"17fe0585-ad09-4585-9b4c-01698e2c295b\") " pod="openshift-route-controller-manager/route-controller-manager-659f59795d-4hwcr" Jan 27 07:55:04 crc kubenswrapper[4787]: I0127 07:55:04.000230 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9b5xw\" (UniqueName: \"kubernetes.io/projected/17fe0585-ad09-4585-9b4c-01698e2c295b-kube-api-access-9b5xw\") pod \"route-controller-manager-659f59795d-4hwcr\" (UID: \"17fe0585-ad09-4585-9b4c-01698e2c295b\") " pod="openshift-route-controller-manager/route-controller-manager-659f59795d-4hwcr" Jan 27 07:55:04 crc kubenswrapper[4787]: I0127 07:55:04.001955 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s7bl5\" (UniqueName: \"kubernetes.io/projected/816883bf-afa6-47b8-b823-7603ebf5cc71-kube-api-access-s7bl5\") pod \"controller-manager-777857c966-2h66n\" (UID: \"816883bf-afa6-47b8-b823-7603ebf5cc71\") " pod="openshift-controller-manager/controller-manager-777857c966-2h66n" Jan 27 07:55:04 crc kubenswrapper[4787]: I0127 07:55:04.170393 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-659f59795d-4hwcr" Jan 27 07:55:04 crc kubenswrapper[4787]: I0127 07:55:04.178170 4787 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-777857c966-2h66n" Jan 27 07:55:04 crc kubenswrapper[4787]: I0127 07:55:04.516266 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-777857c966-2h66n"] Jan 27 07:55:04 crc kubenswrapper[4787]: W0127 07:55:04.524876 4787 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod816883bf_afa6_47b8_b823_7603ebf5cc71.slice/crio-c09f056e6b522f051bebfcf76d79e3302d7058b741907e39732f827b109cc806 WatchSource:0}: Error finding container c09f056e6b522f051bebfcf76d79e3302d7058b741907e39732f827b109cc806: Status 404 returned error can't find the container with id c09f056e6b522f051bebfcf76d79e3302d7058b741907e39732f827b109cc806 Jan 27 07:55:04 crc kubenswrapper[4787]: I0127 07:55:04.659879 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-659f59795d-4hwcr"] Jan 27 07:55:04 crc kubenswrapper[4787]: I0127 07:55:04.784327 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-659f59795d-4hwcr" event={"ID":"17fe0585-ad09-4585-9b4c-01698e2c295b","Type":"ContainerStarted","Data":"5d2b198f58bbf5e2fc1127f374689f197db99d090e6ffaf6f08fba9b1669dc76"} Jan 27 07:55:04 crc kubenswrapper[4787]: I0127 07:55:04.787816 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-777857c966-2h66n" event={"ID":"816883bf-afa6-47b8-b823-7603ebf5cc71","Type":"ContainerStarted","Data":"5f1d64b82d7b846232716736a494d6727d50dc1adacd62ef1ab1de361e5c3498"} Jan 27 07:55:04 crc kubenswrapper[4787]: I0127 07:55:04.787861 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-777857c966-2h66n" event={"ID":"816883bf-afa6-47b8-b823-7603ebf5cc71","Type":"ContainerStarted","Data":"c09f056e6b522f051bebfcf76d79e3302d7058b741907e39732f827b109cc806"} Jan 27 07:55:04 crc kubenswrapper[4787]: I0127 07:55:04.788388 4787 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-777857c966-2h66n" Jan 27 07:55:04 crc kubenswrapper[4787]: I0127 07:55:04.789701 4787 patch_prober.go:28] interesting pod/controller-manager-777857c966-2h66n container/controller-manager namespace/openshift-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.62:8443/healthz\": dial tcp 10.217.0.62:8443: connect: connection refused" start-of-body= Jan 27 07:55:04 crc kubenswrapper[4787]: I0127 07:55:04.789747 4787 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-controller-manager/controller-manager-777857c966-2h66n" podUID="816883bf-afa6-47b8-b823-7603ebf5cc71" containerName="controller-manager" probeResult="failure" output="Get \"https://10.217.0.62:8443/healthz\": dial tcp 10.217.0.62:8443: connect: connection refused" Jan 27 07:55:04 crc kubenswrapper[4787]: I0127 07:55:04.822341 4787 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-777857c966-2h66n" podStartSLOduration=2.822309154 podStartE2EDuration="2.822309154s" podCreationTimestamp="2026-01-27 07:55:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-27 07:55:04.819161981 +0000 UTC m=+210.471517483" 
watchObservedRunningTime="2026-01-27 07:55:04.822309154 +0000 UTC m=+210.474664646" Jan 27 07:55:04 crc kubenswrapper[4787]: I0127 07:55:04.920162 4787 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-tnxzl" Jan 27 07:55:04 crc kubenswrapper[4787]: I0127 07:55:04.921333 4787 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-tnxzl" Jan 27 07:55:05 crc kubenswrapper[4787]: I0127 07:55:05.085330 4787 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1a623b08-d78c-4d91-bff0-6535d713eb2c" path="/var/lib/kubelet/pods/1a623b08-d78c-4d91-bff0-6535d713eb2c/volumes" Jan 27 07:55:05 crc kubenswrapper[4787]: I0127 07:55:05.086509 4787 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5f5e46e1-1252-4a0a-ad11-8d4236f3759b" path="/var/lib/kubelet/pods/5f5e46e1-1252-4a0a-ad11-8d4236f3759b/volumes" Jan 27 07:55:05 crc kubenswrapper[4787]: I0127 07:55:05.087447 4787 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6381e89b-99e4-480e-9ab0-14a99d22b889" path="/var/lib/kubelet/pods/6381e89b-99e4-480e-9ab0-14a99d22b889/volumes" Jan 27 07:55:05 crc kubenswrapper[4787]: I0127 07:55:05.549425 4787 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-cgwgp" Jan 27 07:55:05 crc kubenswrapper[4787]: I0127 07:55:05.550064 4787 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-cgwgp" Jan 27 07:55:05 crc kubenswrapper[4787]: I0127 07:55:05.608456 4787 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-cgwgp" Jan 27 07:55:05 crc kubenswrapper[4787]: I0127 07:55:05.797266 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-kkjnl" event={"ID":"1e42281e-20ed-4105-866f-878ffbf6c6eb","Type":"ContainerStarted","Data":"8cb3ae0fcd39b44a275d8cd5c91a52e737b682bd55f4da55edd1a06941315d84"} Jan 27 07:55:05 crc kubenswrapper[4787]: I0127 07:55:05.799411 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-659f59795d-4hwcr" event={"ID":"17fe0585-ad09-4585-9b4c-01698e2c295b","Type":"ContainerStarted","Data":"dd74d0a47c882efd924ba6fb867cda61c83e9bb61f03bbd8c3f4c56cbfb22222"} Jan 27 07:55:05 crc kubenswrapper[4787]: I0127 07:55:05.800749 4787 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-659f59795d-4hwcr" Jan 27 07:55:05 crc kubenswrapper[4787]: I0127 07:55:05.805362 4787 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-777857c966-2h66n" Jan 27 07:55:05 crc kubenswrapper[4787]: I0127 07:55:05.811881 4787 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-659f59795d-4hwcr" Jan 27 07:55:05 crc kubenswrapper[4787]: I0127 07:55:05.816434 4787 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-kkjnl" podStartSLOduration=3.043845194 podStartE2EDuration="58.816407415s" podCreationTimestamp="2026-01-27 07:54:07 +0000 UTC" firstStartedPulling="2026-01-27 07:54:08.935723462 +0000 UTC m=+154.588078954" lastFinishedPulling="2026-01-27 07:55:04.708285683 +0000 UTC 
m=+210.360641175" observedRunningTime="2026-01-27 07:55:05.81463046 +0000 UTC m=+211.466985972" watchObservedRunningTime="2026-01-27 07:55:05.816407415 +0000 UTC m=+211.468762917" Jan 27 07:55:05 crc kubenswrapper[4787]: I0127 07:55:05.835897 4787 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-659f59795d-4hwcr" podStartSLOduration=3.835865108 podStartE2EDuration="3.835865108s" podCreationTimestamp="2026-01-27 07:55:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-27 07:55:05.833378913 +0000 UTC m=+211.485734405" watchObservedRunningTime="2026-01-27 07:55:05.835865108 +0000 UTC m=+211.488220610" Jan 27 07:55:05 crc kubenswrapper[4787]: I0127 07:55:05.967838 4787 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/community-operators-tnxzl" podUID="44cc5af2-0636-4589-a988-e7e32bfea075" containerName="registry-server" probeResult="failure" output=< Jan 27 07:55:05 crc kubenswrapper[4787]: timeout: failed to connect service ":50051" within 1s Jan 27 07:55:05 crc kubenswrapper[4787]: > Jan 27 07:55:07 crc kubenswrapper[4787]: I0127 07:55:07.840732 4787 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-kkjnl" Jan 27 07:55:07 crc kubenswrapper[4787]: I0127 07:55:07.841171 4787 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-kkjnl" Jan 27 07:55:08 crc kubenswrapper[4787]: I0127 07:55:08.884180 4787 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-kkjnl" podUID="1e42281e-20ed-4105-866f-878ffbf6c6eb" containerName="registry-server" probeResult="failure" output=< Jan 27 07:55:08 crc kubenswrapper[4787]: timeout: failed to connect service ":50051" within 1s Jan 27 07:55:08 crc kubenswrapper[4787]: > Jan 27 07:55:12 crc kubenswrapper[4787]: I0127 07:55:12.833340 4787 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-authentication/oauth-openshift-558db77b4-wdppr" podUID="fcc64739-2fd2-4413-a19e-0bc14dd883d6" containerName="oauth-openshift" containerID="cri-o://2d766c390986fb12647d9296b2ff8a7a88b1618b438430370daaded86b506218" gracePeriod=15 Jan 27 07:55:13 crc kubenswrapper[4787]: I0127 07:55:13.277448 4787 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-wdppr" Jan 27 07:55:13 crc kubenswrapper[4787]: I0127 07:55:13.321408 4787 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-796668b4d5-vqtrj"] Jan 27 07:55:13 crc kubenswrapper[4787]: E0127 07:55:13.325704 4787 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fcc64739-2fd2-4413-a19e-0bc14dd883d6" containerName="oauth-openshift" Jan 27 07:55:13 crc kubenswrapper[4787]: I0127 07:55:13.325750 4787 state_mem.go:107] "Deleted CPUSet assignment" podUID="fcc64739-2fd2-4413-a19e-0bc14dd883d6" containerName="oauth-openshift" Jan 27 07:55:13 crc kubenswrapper[4787]: I0127 07:55:13.325954 4787 memory_manager.go:354] "RemoveStaleState removing state" podUID="fcc64739-2fd2-4413-a19e-0bc14dd883d6" containerName="oauth-openshift" Jan 27 07:55:13 crc kubenswrapper[4787]: I0127 07:55:13.326504 4787 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-796668b4d5-vqtrj" Jan 27 07:55:13 crc kubenswrapper[4787]: I0127 07:55:13.335904 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-796668b4d5-vqtrj"] Jan 27 07:55:13 crc kubenswrapper[4787]: I0127 07:55:13.368121 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/fcc64739-2fd2-4413-a19e-0bc14dd883d6-v4-0-config-user-idp-0-file-data\") pod \"fcc64739-2fd2-4413-a19e-0bc14dd883d6\" (UID: \"fcc64739-2fd2-4413-a19e-0bc14dd883d6\") " Jan 27 07:55:13 crc kubenswrapper[4787]: I0127 07:55:13.368476 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/fcc64739-2fd2-4413-a19e-0bc14dd883d6-audit-policies\") pod \"fcc64739-2fd2-4413-a19e-0bc14dd883d6\" (UID: \"fcc64739-2fd2-4413-a19e-0bc14dd883d6\") " Jan 27 07:55:13 crc kubenswrapper[4787]: I0127 07:55:13.368593 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/fcc64739-2fd2-4413-a19e-0bc14dd883d6-v4-0-config-user-template-provider-selection\") pod \"fcc64739-2fd2-4413-a19e-0bc14dd883d6\" (UID: \"fcc64739-2fd2-4413-a19e-0bc14dd883d6\") " Jan 27 07:55:13 crc kubenswrapper[4787]: I0127 07:55:13.368742 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/fcc64739-2fd2-4413-a19e-0bc14dd883d6-v4-0-config-system-ocp-branding-template\") pod \"fcc64739-2fd2-4413-a19e-0bc14dd883d6\" (UID: \"fcc64739-2fd2-4413-a19e-0bc14dd883d6\") " Jan 27 07:55:13 crc kubenswrapper[4787]: I0127 07:55:13.368857 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/fcc64739-2fd2-4413-a19e-0bc14dd883d6-v4-0-config-system-router-certs\") pod \"fcc64739-2fd2-4413-a19e-0bc14dd883d6\" (UID: \"fcc64739-2fd2-4413-a19e-0bc14dd883d6\") " Jan 27 07:55:13 crc kubenswrapper[4787]: I0127 07:55:13.368941 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/fcc64739-2fd2-4413-a19e-0bc14dd883d6-v4-0-config-system-session\") pod \"fcc64739-2fd2-4413-a19e-0bc14dd883d6\" (UID: \"fcc64739-2fd2-4413-a19e-0bc14dd883d6\") " Jan 27 07:55:13 crc kubenswrapper[4787]: I0127 07:55:13.369080 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/fcc64739-2fd2-4413-a19e-0bc14dd883d6-audit-dir\") pod \"fcc64739-2fd2-4413-a19e-0bc14dd883d6\" (UID: \"fcc64739-2fd2-4413-a19e-0bc14dd883d6\") " Jan 27 07:55:13 crc kubenswrapper[4787]: I0127 07:55:13.369179 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/fcc64739-2fd2-4413-a19e-0bc14dd883d6-v4-0-config-system-trusted-ca-bundle\") pod \"fcc64739-2fd2-4413-a19e-0bc14dd883d6\" (UID: \"fcc64739-2fd2-4413-a19e-0bc14dd883d6\") " Jan 27 07:55:13 crc kubenswrapper[4787]: I0127 07:55:13.369355 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: 
\"kubernetes.io/secret/fcc64739-2fd2-4413-a19e-0bc14dd883d6-v4-0-config-system-serving-cert\") pod \"fcc64739-2fd2-4413-a19e-0bc14dd883d6\" (UID: \"fcc64739-2fd2-4413-a19e-0bc14dd883d6\") " Jan 27 07:55:13 crc kubenswrapper[4787]: I0127 07:55:13.369476 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c54z8\" (UniqueName: \"kubernetes.io/projected/fcc64739-2fd2-4413-a19e-0bc14dd883d6-kube-api-access-c54z8\") pod \"fcc64739-2fd2-4413-a19e-0bc14dd883d6\" (UID: \"fcc64739-2fd2-4413-a19e-0bc14dd883d6\") " Jan 27 07:55:13 crc kubenswrapper[4787]: I0127 07:55:13.369570 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/fcc64739-2fd2-4413-a19e-0bc14dd883d6-v4-0-config-system-service-ca\") pod \"fcc64739-2fd2-4413-a19e-0bc14dd883d6\" (UID: \"fcc64739-2fd2-4413-a19e-0bc14dd883d6\") " Jan 27 07:55:13 crc kubenswrapper[4787]: I0127 07:55:13.369663 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/fcc64739-2fd2-4413-a19e-0bc14dd883d6-v4-0-config-system-cliconfig\") pod \"fcc64739-2fd2-4413-a19e-0bc14dd883d6\" (UID: \"fcc64739-2fd2-4413-a19e-0bc14dd883d6\") " Jan 27 07:55:13 crc kubenswrapper[4787]: I0127 07:55:13.369749 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/fcc64739-2fd2-4413-a19e-0bc14dd883d6-v4-0-config-user-template-login\") pod \"fcc64739-2fd2-4413-a19e-0bc14dd883d6\" (UID: \"fcc64739-2fd2-4413-a19e-0bc14dd883d6\") " Jan 27 07:55:13 crc kubenswrapper[4787]: I0127 07:55:13.369927 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/fcc64739-2fd2-4413-a19e-0bc14dd883d6-v4-0-config-user-template-error\") pod \"fcc64739-2fd2-4413-a19e-0bc14dd883d6\" (UID: \"fcc64739-2fd2-4413-a19e-0bc14dd883d6\") " Jan 27 07:55:13 crc kubenswrapper[4787]: I0127 07:55:13.369670 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fcc64739-2fd2-4413-a19e-0bc14dd883d6-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "fcc64739-2fd2-4413-a19e-0bc14dd883d6" (UID: "fcc64739-2fd2-4413-a19e-0bc14dd883d6"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 27 07:55:13 crc kubenswrapper[4787]: I0127 07:55:13.369694 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/fcc64739-2fd2-4413-a19e-0bc14dd883d6-audit-dir" (OuterVolumeSpecName: "audit-dir") pod "fcc64739-2fd2-4413-a19e-0bc14dd883d6" (UID: "fcc64739-2fd2-4413-a19e-0bc14dd883d6"). InnerVolumeSpecName "audit-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 27 07:55:13 crc kubenswrapper[4787]: I0127 07:55:13.370176 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fcc64739-2fd2-4413-a19e-0bc14dd883d6-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "fcc64739-2fd2-4413-a19e-0bc14dd883d6" (UID: "fcc64739-2fd2-4413-a19e-0bc14dd883d6"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 27 07:55:13 crc kubenswrapper[4787]: I0127 07:55:13.370410 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fcc64739-2fd2-4413-a19e-0bc14dd883d6-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "fcc64739-2fd2-4413-a19e-0bc14dd883d6" (UID: "fcc64739-2fd2-4413-a19e-0bc14dd883d6"). InnerVolumeSpecName "v4-0-config-system-cliconfig". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 27 07:55:13 crc kubenswrapper[4787]: I0127 07:55:13.370447 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fcc64739-2fd2-4413-a19e-0bc14dd883d6-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "fcc64739-2fd2-4413-a19e-0bc14dd883d6" (UID: "fcc64739-2fd2-4413-a19e-0bc14dd883d6"). InnerVolumeSpecName "v4-0-config-system-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 27 07:55:13 crc kubenswrapper[4787]: I0127 07:55:13.376326 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fcc64739-2fd2-4413-a19e-0bc14dd883d6-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "fcc64739-2fd2-4413-a19e-0bc14dd883d6" (UID: "fcc64739-2fd2-4413-a19e-0bc14dd883d6"). InnerVolumeSpecName "v4-0-config-system-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 27 07:55:13 crc kubenswrapper[4787]: I0127 07:55:13.376590 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fcc64739-2fd2-4413-a19e-0bc14dd883d6-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "fcc64739-2fd2-4413-a19e-0bc14dd883d6" (UID: "fcc64739-2fd2-4413-a19e-0bc14dd883d6"). InnerVolumeSpecName "v4-0-config-system-session". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 27 07:55:13 crc kubenswrapper[4787]: I0127 07:55:13.377791 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fcc64739-2fd2-4413-a19e-0bc14dd883d6-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "fcc64739-2fd2-4413-a19e-0bc14dd883d6" (UID: "fcc64739-2fd2-4413-a19e-0bc14dd883d6"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 27 07:55:13 crc kubenswrapper[4787]: I0127 07:55:13.378047 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fcc64739-2fd2-4413-a19e-0bc14dd883d6-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "fcc64739-2fd2-4413-a19e-0bc14dd883d6" (UID: "fcc64739-2fd2-4413-a19e-0bc14dd883d6"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 27 07:55:13 crc kubenswrapper[4787]: I0127 07:55:13.378210 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fcc64739-2fd2-4413-a19e-0bc14dd883d6-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "fcc64739-2fd2-4413-a19e-0bc14dd883d6" (UID: "fcc64739-2fd2-4413-a19e-0bc14dd883d6"). InnerVolumeSpecName "v4-0-config-user-template-error". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 27 07:55:13 crc kubenswrapper[4787]: I0127 07:55:13.378504 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fcc64739-2fd2-4413-a19e-0bc14dd883d6-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "fcc64739-2fd2-4413-a19e-0bc14dd883d6" (UID: "fcc64739-2fd2-4413-a19e-0bc14dd883d6"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 27 07:55:13 crc kubenswrapper[4787]: I0127 07:55:13.378850 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fcc64739-2fd2-4413-a19e-0bc14dd883d6-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "fcc64739-2fd2-4413-a19e-0bc14dd883d6" (UID: "fcc64739-2fd2-4413-a19e-0bc14dd883d6"). InnerVolumeSpecName "v4-0-config-user-template-login". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 27 07:55:13 crc kubenswrapper[4787]: I0127 07:55:13.378913 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fcc64739-2fd2-4413-a19e-0bc14dd883d6-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "fcc64739-2fd2-4413-a19e-0bc14dd883d6" (UID: "fcc64739-2fd2-4413-a19e-0bc14dd883d6"). InnerVolumeSpecName "v4-0-config-system-router-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 27 07:55:13 crc kubenswrapper[4787]: I0127 07:55:13.382311 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fcc64739-2fd2-4413-a19e-0bc14dd883d6-kube-api-access-c54z8" (OuterVolumeSpecName: "kube-api-access-c54z8") pod "fcc64739-2fd2-4413-a19e-0bc14dd883d6" (UID: "fcc64739-2fd2-4413-a19e-0bc14dd883d6"). InnerVolumeSpecName "kube-api-access-c54z8". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 27 07:55:13 crc kubenswrapper[4787]: I0127 07:55:13.471601 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/0bd74928-5db8-49c1-ba2f-c8637ea86d46-v4-0-config-user-template-login\") pod \"oauth-openshift-796668b4d5-vqtrj\" (UID: \"0bd74928-5db8-49c1-ba2f-c8637ea86d46\") " pod="openshift-authentication/oauth-openshift-796668b4d5-vqtrj" Jan 27 07:55:13 crc kubenswrapper[4787]: I0127 07:55:13.471922 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/0bd74928-5db8-49c1-ba2f-c8637ea86d46-v4-0-config-system-session\") pod \"oauth-openshift-796668b4d5-vqtrj\" (UID: \"0bd74928-5db8-49c1-ba2f-c8637ea86d46\") " pod="openshift-authentication/oauth-openshift-796668b4d5-vqtrj" Jan 27 07:55:13 crc kubenswrapper[4787]: I0127 07:55:13.472049 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/0bd74928-5db8-49c1-ba2f-c8637ea86d46-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-796668b4d5-vqtrj\" (UID: \"0bd74928-5db8-49c1-ba2f-c8637ea86d46\") " pod="openshift-authentication/oauth-openshift-796668b4d5-vqtrj" Jan 27 07:55:13 crc kubenswrapper[4787]: I0127 07:55:13.472139 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mdkw4\" (UniqueName: \"kubernetes.io/projected/0bd74928-5db8-49c1-ba2f-c8637ea86d46-kube-api-access-mdkw4\") pod \"oauth-openshift-796668b4d5-vqtrj\" (UID: \"0bd74928-5db8-49c1-ba2f-c8637ea86d46\") " pod="openshift-authentication/oauth-openshift-796668b4d5-vqtrj" Jan 27 07:55:13 crc kubenswrapper[4787]: I0127 07:55:13.472225 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/0bd74928-5db8-49c1-ba2f-c8637ea86d46-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-796668b4d5-vqtrj\" (UID: \"0bd74928-5db8-49c1-ba2f-c8637ea86d46\") " pod="openshift-authentication/oauth-openshift-796668b4d5-vqtrj" Jan 27 07:55:13 crc kubenswrapper[4787]: I0127 07:55:13.472301 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/0bd74928-5db8-49c1-ba2f-c8637ea86d46-audit-dir\") pod \"oauth-openshift-796668b4d5-vqtrj\" (UID: \"0bd74928-5db8-49c1-ba2f-c8637ea86d46\") " pod="openshift-authentication/oauth-openshift-796668b4d5-vqtrj" Jan 27 07:55:13 crc kubenswrapper[4787]: I0127 07:55:13.472400 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/0bd74928-5db8-49c1-ba2f-c8637ea86d46-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-796668b4d5-vqtrj\" (UID: \"0bd74928-5db8-49c1-ba2f-c8637ea86d46\") " pod="openshift-authentication/oauth-openshift-796668b4d5-vqtrj" Jan 27 07:55:13 crc kubenswrapper[4787]: I0127 07:55:13.472499 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: 
\"kubernetes.io/secret/0bd74928-5db8-49c1-ba2f-c8637ea86d46-v4-0-config-system-router-certs\") pod \"oauth-openshift-796668b4d5-vqtrj\" (UID: \"0bd74928-5db8-49c1-ba2f-c8637ea86d46\") " pod="openshift-authentication/oauth-openshift-796668b4d5-vqtrj" Jan 27 07:55:13 crc kubenswrapper[4787]: I0127 07:55:13.472624 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/0bd74928-5db8-49c1-ba2f-c8637ea86d46-audit-policies\") pod \"oauth-openshift-796668b4d5-vqtrj\" (UID: \"0bd74928-5db8-49c1-ba2f-c8637ea86d46\") " pod="openshift-authentication/oauth-openshift-796668b4d5-vqtrj" Jan 27 07:55:13 crc kubenswrapper[4787]: I0127 07:55:13.472727 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/0bd74928-5db8-49c1-ba2f-c8637ea86d46-v4-0-config-system-service-ca\") pod \"oauth-openshift-796668b4d5-vqtrj\" (UID: \"0bd74928-5db8-49c1-ba2f-c8637ea86d46\") " pod="openshift-authentication/oauth-openshift-796668b4d5-vqtrj" Jan 27 07:55:13 crc kubenswrapper[4787]: I0127 07:55:13.472866 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/0bd74928-5db8-49c1-ba2f-c8637ea86d46-v4-0-config-user-template-error\") pod \"oauth-openshift-796668b4d5-vqtrj\" (UID: \"0bd74928-5db8-49c1-ba2f-c8637ea86d46\") " pod="openshift-authentication/oauth-openshift-796668b4d5-vqtrj" Jan 27 07:55:13 crc kubenswrapper[4787]: I0127 07:55:13.473007 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/0bd74928-5db8-49c1-ba2f-c8637ea86d46-v4-0-config-system-serving-cert\") pod \"oauth-openshift-796668b4d5-vqtrj\" (UID: \"0bd74928-5db8-49c1-ba2f-c8637ea86d46\") " pod="openshift-authentication/oauth-openshift-796668b4d5-vqtrj" Jan 27 07:55:13 crc kubenswrapper[4787]: I0127 07:55:13.473105 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/0bd74928-5db8-49c1-ba2f-c8637ea86d46-v4-0-config-system-cliconfig\") pod \"oauth-openshift-796668b4d5-vqtrj\" (UID: \"0bd74928-5db8-49c1-ba2f-c8637ea86d46\") " pod="openshift-authentication/oauth-openshift-796668b4d5-vqtrj" Jan 27 07:55:13 crc kubenswrapper[4787]: I0127 07:55:13.473229 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/0bd74928-5db8-49c1-ba2f-c8637ea86d46-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-796668b4d5-vqtrj\" (UID: \"0bd74928-5db8-49c1-ba2f-c8637ea86d46\") " pod="openshift-authentication/oauth-openshift-796668b4d5-vqtrj" Jan 27 07:55:13 crc kubenswrapper[4787]: I0127 07:55:13.473373 4787 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/fcc64739-2fd2-4413-a19e-0bc14dd883d6-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Jan 27 07:55:13 crc kubenswrapper[4787]: I0127 07:55:13.473465 4787 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/fcc64739-2fd2-4413-a19e-0bc14dd883d6-v4-0-config-system-session\") on node 
\"crc\" DevicePath \"\"" Jan 27 07:55:13 crc kubenswrapper[4787]: I0127 07:55:13.473577 4787 reconciler_common.go:293] "Volume detached for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/fcc64739-2fd2-4413-a19e-0bc14dd883d6-audit-dir\") on node \"crc\" DevicePath \"\"" Jan 27 07:55:13 crc kubenswrapper[4787]: I0127 07:55:13.473675 4787 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/fcc64739-2fd2-4413-a19e-0bc14dd883d6-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 27 07:55:13 crc kubenswrapper[4787]: I0127 07:55:13.473744 4787 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/fcc64739-2fd2-4413-a19e-0bc14dd883d6-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 27 07:55:13 crc kubenswrapper[4787]: I0127 07:55:13.473809 4787 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c54z8\" (UniqueName: \"kubernetes.io/projected/fcc64739-2fd2-4413-a19e-0bc14dd883d6-kube-api-access-c54z8\") on node \"crc\" DevicePath \"\"" Jan 27 07:55:13 crc kubenswrapper[4787]: I0127 07:55:13.473867 4787 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/fcc64739-2fd2-4413-a19e-0bc14dd883d6-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Jan 27 07:55:13 crc kubenswrapper[4787]: I0127 07:55:13.473930 4787 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/fcc64739-2fd2-4413-a19e-0bc14dd883d6-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Jan 27 07:55:13 crc kubenswrapper[4787]: I0127 07:55:13.473994 4787 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/fcc64739-2fd2-4413-a19e-0bc14dd883d6-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Jan 27 07:55:13 crc kubenswrapper[4787]: I0127 07:55:13.474137 4787 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/fcc64739-2fd2-4413-a19e-0bc14dd883d6-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Jan 27 07:55:13 crc kubenswrapper[4787]: I0127 07:55:13.474273 4787 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/fcc64739-2fd2-4413-a19e-0bc14dd883d6-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Jan 27 07:55:13 crc kubenswrapper[4787]: I0127 07:55:13.474368 4787 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/fcc64739-2fd2-4413-a19e-0bc14dd883d6-audit-policies\") on node \"crc\" DevicePath \"\"" Jan 27 07:55:13 crc kubenswrapper[4787]: I0127 07:55:13.474454 4787 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/fcc64739-2fd2-4413-a19e-0bc14dd883d6-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Jan 27 07:55:13 crc kubenswrapper[4787]: I0127 07:55:13.474524 4787 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/fcc64739-2fd2-4413-a19e-0bc14dd883d6-v4-0-config-system-ocp-branding-template\") on node 
\"crc\" DevicePath \"\"" Jan 27 07:55:13 crc kubenswrapper[4787]: I0127 07:55:13.576227 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/0bd74928-5db8-49c1-ba2f-c8637ea86d46-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-796668b4d5-vqtrj\" (UID: \"0bd74928-5db8-49c1-ba2f-c8637ea86d46\") " pod="openshift-authentication/oauth-openshift-796668b4d5-vqtrj" Jan 27 07:55:13 crc kubenswrapper[4787]: I0127 07:55:13.576318 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mdkw4\" (UniqueName: \"kubernetes.io/projected/0bd74928-5db8-49c1-ba2f-c8637ea86d46-kube-api-access-mdkw4\") pod \"oauth-openshift-796668b4d5-vqtrj\" (UID: \"0bd74928-5db8-49c1-ba2f-c8637ea86d46\") " pod="openshift-authentication/oauth-openshift-796668b4d5-vqtrj" Jan 27 07:55:13 crc kubenswrapper[4787]: I0127 07:55:13.576384 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/0bd74928-5db8-49c1-ba2f-c8637ea86d46-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-796668b4d5-vqtrj\" (UID: \"0bd74928-5db8-49c1-ba2f-c8637ea86d46\") " pod="openshift-authentication/oauth-openshift-796668b4d5-vqtrj" Jan 27 07:55:13 crc kubenswrapper[4787]: I0127 07:55:13.576433 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/0bd74928-5db8-49c1-ba2f-c8637ea86d46-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-796668b4d5-vqtrj\" (UID: \"0bd74928-5db8-49c1-ba2f-c8637ea86d46\") " pod="openshift-authentication/oauth-openshift-796668b4d5-vqtrj" Jan 27 07:55:13 crc kubenswrapper[4787]: I0127 07:55:13.576500 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/0bd74928-5db8-49c1-ba2f-c8637ea86d46-audit-dir\") pod \"oauth-openshift-796668b4d5-vqtrj\" (UID: \"0bd74928-5db8-49c1-ba2f-c8637ea86d46\") " pod="openshift-authentication/oauth-openshift-796668b4d5-vqtrj" Jan 27 07:55:13 crc kubenswrapper[4787]: I0127 07:55:13.576591 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/0bd74928-5db8-49c1-ba2f-c8637ea86d46-v4-0-config-system-router-certs\") pod \"oauth-openshift-796668b4d5-vqtrj\" (UID: \"0bd74928-5db8-49c1-ba2f-c8637ea86d46\") " pod="openshift-authentication/oauth-openshift-796668b4d5-vqtrj" Jan 27 07:55:13 crc kubenswrapper[4787]: I0127 07:55:13.576676 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/0bd74928-5db8-49c1-ba2f-c8637ea86d46-audit-policies\") pod \"oauth-openshift-796668b4d5-vqtrj\" (UID: \"0bd74928-5db8-49c1-ba2f-c8637ea86d46\") " pod="openshift-authentication/oauth-openshift-796668b4d5-vqtrj" Jan 27 07:55:13 crc kubenswrapper[4787]: I0127 07:55:13.576726 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/0bd74928-5db8-49c1-ba2f-c8637ea86d46-v4-0-config-system-service-ca\") pod \"oauth-openshift-796668b4d5-vqtrj\" (UID: \"0bd74928-5db8-49c1-ba2f-c8637ea86d46\") " pod="openshift-authentication/oauth-openshift-796668b4d5-vqtrj" Jan 27 07:55:13 
crc kubenswrapper[4787]: I0127 07:55:13.576795 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/0bd74928-5db8-49c1-ba2f-c8637ea86d46-v4-0-config-user-template-error\") pod \"oauth-openshift-796668b4d5-vqtrj\" (UID: \"0bd74928-5db8-49c1-ba2f-c8637ea86d46\") " pod="openshift-authentication/oauth-openshift-796668b4d5-vqtrj" Jan 27 07:55:13 crc kubenswrapper[4787]: I0127 07:55:13.576868 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/0bd74928-5db8-49c1-ba2f-c8637ea86d46-v4-0-config-system-cliconfig\") pod \"oauth-openshift-796668b4d5-vqtrj\" (UID: \"0bd74928-5db8-49c1-ba2f-c8637ea86d46\") " pod="openshift-authentication/oauth-openshift-796668b4d5-vqtrj" Jan 27 07:55:13 crc kubenswrapper[4787]: I0127 07:55:13.577791 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/0bd74928-5db8-49c1-ba2f-c8637ea86d46-audit-policies\") pod \"oauth-openshift-796668b4d5-vqtrj\" (UID: \"0bd74928-5db8-49c1-ba2f-c8637ea86d46\") " pod="openshift-authentication/oauth-openshift-796668b4d5-vqtrj" Jan 27 07:55:13 crc kubenswrapper[4787]: I0127 07:55:13.577828 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/0bd74928-5db8-49c1-ba2f-c8637ea86d46-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-796668b4d5-vqtrj\" (UID: \"0bd74928-5db8-49c1-ba2f-c8637ea86d46\") " pod="openshift-authentication/oauth-openshift-796668b4d5-vqtrj" Jan 27 07:55:13 crc kubenswrapper[4787]: I0127 07:55:13.577923 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/0bd74928-5db8-49c1-ba2f-c8637ea86d46-audit-dir\") pod \"oauth-openshift-796668b4d5-vqtrj\" (UID: \"0bd74928-5db8-49c1-ba2f-c8637ea86d46\") " pod="openshift-authentication/oauth-openshift-796668b4d5-vqtrj" Jan 27 07:55:13 crc kubenswrapper[4787]: I0127 07:55:13.579609 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/0bd74928-5db8-49c1-ba2f-c8637ea86d46-v4-0-config-system-serving-cert\") pod \"oauth-openshift-796668b4d5-vqtrj\" (UID: \"0bd74928-5db8-49c1-ba2f-c8637ea86d46\") " pod="openshift-authentication/oauth-openshift-796668b4d5-vqtrj" Jan 27 07:55:13 crc kubenswrapper[4787]: I0127 07:55:13.579671 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/0bd74928-5db8-49c1-ba2f-c8637ea86d46-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-796668b4d5-vqtrj\" (UID: \"0bd74928-5db8-49c1-ba2f-c8637ea86d46\") " pod="openshift-authentication/oauth-openshift-796668b4d5-vqtrj" Jan 27 07:55:13 crc kubenswrapper[4787]: I0127 07:55:13.579733 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/0bd74928-5db8-49c1-ba2f-c8637ea86d46-v4-0-config-user-template-login\") pod \"oauth-openshift-796668b4d5-vqtrj\" (UID: \"0bd74928-5db8-49c1-ba2f-c8637ea86d46\") " pod="openshift-authentication/oauth-openshift-796668b4d5-vqtrj" Jan 27 07:55:13 crc kubenswrapper[4787]: I0127 07:55:13.579759 4787 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/0bd74928-5db8-49c1-ba2f-c8637ea86d46-v4-0-config-system-session\") pod \"oauth-openshift-796668b4d5-vqtrj\" (UID: \"0bd74928-5db8-49c1-ba2f-c8637ea86d46\") " pod="openshift-authentication/oauth-openshift-796668b4d5-vqtrj" Jan 27 07:55:13 crc kubenswrapper[4787]: I0127 07:55:13.580674 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/0bd74928-5db8-49c1-ba2f-c8637ea86d46-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-796668b4d5-vqtrj\" (UID: \"0bd74928-5db8-49c1-ba2f-c8637ea86d46\") " pod="openshift-authentication/oauth-openshift-796668b4d5-vqtrj" Jan 27 07:55:13 crc kubenswrapper[4787]: I0127 07:55:13.581189 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/0bd74928-5db8-49c1-ba2f-c8637ea86d46-v4-0-config-system-service-ca\") pod \"oauth-openshift-796668b4d5-vqtrj\" (UID: \"0bd74928-5db8-49c1-ba2f-c8637ea86d46\") " pod="openshift-authentication/oauth-openshift-796668b4d5-vqtrj" Jan 27 07:55:13 crc kubenswrapper[4787]: I0127 07:55:13.581499 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/0bd74928-5db8-49c1-ba2f-c8637ea86d46-v4-0-config-system-cliconfig\") pod \"oauth-openshift-796668b4d5-vqtrj\" (UID: \"0bd74928-5db8-49c1-ba2f-c8637ea86d46\") " pod="openshift-authentication/oauth-openshift-796668b4d5-vqtrj" Jan 27 07:55:13 crc kubenswrapper[4787]: I0127 07:55:13.585063 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/0bd74928-5db8-49c1-ba2f-c8637ea86d46-v4-0-config-system-router-certs\") pod \"oauth-openshift-796668b4d5-vqtrj\" (UID: \"0bd74928-5db8-49c1-ba2f-c8637ea86d46\") " pod="openshift-authentication/oauth-openshift-796668b4d5-vqtrj" Jan 27 07:55:13 crc kubenswrapper[4787]: I0127 07:55:13.585099 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/0bd74928-5db8-49c1-ba2f-c8637ea86d46-v4-0-config-user-template-error\") pod \"oauth-openshift-796668b4d5-vqtrj\" (UID: \"0bd74928-5db8-49c1-ba2f-c8637ea86d46\") " pod="openshift-authentication/oauth-openshift-796668b4d5-vqtrj" Jan 27 07:55:13 crc kubenswrapper[4787]: I0127 07:55:13.585216 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/0bd74928-5db8-49c1-ba2f-c8637ea86d46-v4-0-config-user-template-login\") pod \"oauth-openshift-796668b4d5-vqtrj\" (UID: \"0bd74928-5db8-49c1-ba2f-c8637ea86d46\") " pod="openshift-authentication/oauth-openshift-796668b4d5-vqtrj" Jan 27 07:55:13 crc kubenswrapper[4787]: I0127 07:55:13.585749 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/0bd74928-5db8-49c1-ba2f-c8637ea86d46-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-796668b4d5-vqtrj\" (UID: \"0bd74928-5db8-49c1-ba2f-c8637ea86d46\") " pod="openshift-authentication/oauth-openshift-796668b4d5-vqtrj" Jan 27 07:55:13 crc kubenswrapper[4787]: I0127 07:55:13.586310 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/0bd74928-5db8-49c1-ba2f-c8637ea86d46-v4-0-config-system-session\") pod \"oauth-openshift-796668b4d5-vqtrj\" (UID: \"0bd74928-5db8-49c1-ba2f-c8637ea86d46\") " pod="openshift-authentication/oauth-openshift-796668b4d5-vqtrj" Jan 27 07:55:13 crc kubenswrapper[4787]: I0127 07:55:13.587076 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/0bd74928-5db8-49c1-ba2f-c8637ea86d46-v4-0-config-system-serving-cert\") pod \"oauth-openshift-796668b4d5-vqtrj\" (UID: \"0bd74928-5db8-49c1-ba2f-c8637ea86d46\") " pod="openshift-authentication/oauth-openshift-796668b4d5-vqtrj" Jan 27 07:55:13 crc kubenswrapper[4787]: I0127 07:55:13.588054 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/0bd74928-5db8-49c1-ba2f-c8637ea86d46-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-796668b4d5-vqtrj\" (UID: \"0bd74928-5db8-49c1-ba2f-c8637ea86d46\") " pod="openshift-authentication/oauth-openshift-796668b4d5-vqtrj" Jan 27 07:55:13 crc kubenswrapper[4787]: I0127 07:55:13.593879 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mdkw4\" (UniqueName: \"kubernetes.io/projected/0bd74928-5db8-49c1-ba2f-c8637ea86d46-kube-api-access-mdkw4\") pod \"oauth-openshift-796668b4d5-vqtrj\" (UID: \"0bd74928-5db8-49c1-ba2f-c8637ea86d46\") " pod="openshift-authentication/oauth-openshift-796668b4d5-vqtrj" Jan 27 07:55:13 crc kubenswrapper[4787]: I0127 07:55:13.662596 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-796668b4d5-vqtrj" Jan 27 07:55:13 crc kubenswrapper[4787]: I0127 07:55:13.852808 4787 generic.go:334] "Generic (PLEG): container finished" podID="fcc64739-2fd2-4413-a19e-0bc14dd883d6" containerID="2d766c390986fb12647d9296b2ff8a7a88b1618b438430370daaded86b506218" exitCode=0 Jan 27 07:55:13 crc kubenswrapper[4787]: I0127 07:55:13.852913 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-wdppr" event={"ID":"fcc64739-2fd2-4413-a19e-0bc14dd883d6","Type":"ContainerDied","Data":"2d766c390986fb12647d9296b2ff8a7a88b1618b438430370daaded86b506218"} Jan 27 07:55:13 crc kubenswrapper[4787]: I0127 07:55:13.852952 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-wdppr" event={"ID":"fcc64739-2fd2-4413-a19e-0bc14dd883d6","Type":"ContainerDied","Data":"0586f60c3abf6a05bb8ec725bce60998408f815b2354ed56f05b7be6dbc58bdc"} Jan 27 07:55:13 crc kubenswrapper[4787]: I0127 07:55:13.852971 4787 scope.go:117] "RemoveContainer" containerID="2d766c390986fb12647d9296b2ff8a7a88b1618b438430370daaded86b506218" Jan 27 07:55:13 crc kubenswrapper[4787]: I0127 07:55:13.853111 4787 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-wdppr" Jan 27 07:55:13 crc kubenswrapper[4787]: I0127 07:55:13.887639 4787 scope.go:117] "RemoveContainer" containerID="2d766c390986fb12647d9296b2ff8a7a88b1618b438430370daaded86b506218" Jan 27 07:55:13 crc kubenswrapper[4787]: E0127 07:55:13.888901 4787 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2d766c390986fb12647d9296b2ff8a7a88b1618b438430370daaded86b506218\": container with ID starting with 2d766c390986fb12647d9296b2ff8a7a88b1618b438430370daaded86b506218 not found: ID does not exist" containerID="2d766c390986fb12647d9296b2ff8a7a88b1618b438430370daaded86b506218" Jan 27 07:55:13 crc kubenswrapper[4787]: I0127 07:55:13.888983 4787 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2d766c390986fb12647d9296b2ff8a7a88b1618b438430370daaded86b506218"} err="failed to get container status \"2d766c390986fb12647d9296b2ff8a7a88b1618b438430370daaded86b506218\": rpc error: code = NotFound desc = could not find container \"2d766c390986fb12647d9296b2ff8a7a88b1618b438430370daaded86b506218\": container with ID starting with 2d766c390986fb12647d9296b2ff8a7a88b1618b438430370daaded86b506218 not found: ID does not exist" Jan 27 07:55:13 crc kubenswrapper[4787]: I0127 07:55:13.898183 4787 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-wdppr"] Jan 27 07:55:13 crc kubenswrapper[4787]: I0127 07:55:13.905842 4787 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-wdppr"] Jan 27 07:55:14 crc kubenswrapper[4787]: I0127 07:55:14.134337 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-796668b4d5-vqtrj"] Jan 27 07:55:14 crc kubenswrapper[4787]: I0127 07:55:14.863613 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-796668b4d5-vqtrj" event={"ID":"0bd74928-5db8-49c1-ba2f-c8637ea86d46","Type":"ContainerStarted","Data":"19459ce0cd0eaf9ca8584f7b28e2280652ac5238fb1a67badc08fb044a4438e3"} Jan 27 07:55:14 crc kubenswrapper[4787]: I0127 07:55:14.864370 4787 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-796668b4d5-vqtrj" Jan 27 07:55:14 crc kubenswrapper[4787]: I0127 07:55:14.864589 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-796668b4d5-vqtrj" event={"ID":"0bd74928-5db8-49c1-ba2f-c8637ea86d46","Type":"ContainerStarted","Data":"cc0d409f59f07f3cb4500b4109d68716eccc076c958596fc2e5b3bc1722c5080"} Jan 27 07:55:14 crc kubenswrapper[4787]: I0127 07:55:14.895828 4787 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-796668b4d5-vqtrj" podStartSLOduration=27.895793227 podStartE2EDuration="27.895793227s" podCreationTimestamp="2026-01-27 07:54:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-27 07:55:14.889270131 +0000 UTC m=+220.541625713" watchObservedRunningTime="2026-01-27 07:55:14.895793227 +0000 UTC m=+220.548148759" Jan 27 07:55:14 crc kubenswrapper[4787]: I0127 07:55:14.975428 4787 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-tnxzl" Jan 27 07:55:15 crc 
kubenswrapper[4787]: I0127 07:55:15.048661 4787 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-tnxzl" Jan 27 07:55:15 crc kubenswrapper[4787]: I0127 07:55:15.060962 4787 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-796668b4d5-vqtrj" Jan 27 07:55:15 crc kubenswrapper[4787]: I0127 07:55:15.083565 4787 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fcc64739-2fd2-4413-a19e-0bc14dd883d6" path="/var/lib/kubelet/pods/fcc64739-2fd2-4413-a19e-0bc14dd883d6/volumes" Jan 27 07:55:15 crc kubenswrapper[4787]: I0127 07:55:15.600792 4787 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-cgwgp" Jan 27 07:55:16 crc kubenswrapper[4787]: I0127 07:55:16.225275 4787 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-cgwgp"] Jan 27 07:55:16 crc kubenswrapper[4787]: I0127 07:55:16.225505 4787 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-cgwgp" podUID="0368ea79-94a8-42e3-8986-dddbec83d755" containerName="registry-server" containerID="cri-o://40ccd79f42857c9cc5f59993f886e0ed52b8d538b0a585d795f0fbaf3b3c58f2" gracePeriod=2 Jan 27 07:55:16 crc kubenswrapper[4787]: I0127 07:55:16.756574 4787 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-cgwgp" Jan 27 07:55:16 crc kubenswrapper[4787]: I0127 07:55:16.832633 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-z5r7l\" (UniqueName: \"kubernetes.io/projected/0368ea79-94a8-42e3-8986-dddbec83d755-kube-api-access-z5r7l\") pod \"0368ea79-94a8-42e3-8986-dddbec83d755\" (UID: \"0368ea79-94a8-42e3-8986-dddbec83d755\") " Jan 27 07:55:16 crc kubenswrapper[4787]: I0127 07:55:16.832883 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0368ea79-94a8-42e3-8986-dddbec83d755-catalog-content\") pod \"0368ea79-94a8-42e3-8986-dddbec83d755\" (UID: \"0368ea79-94a8-42e3-8986-dddbec83d755\") " Jan 27 07:55:16 crc kubenswrapper[4787]: I0127 07:55:16.832916 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0368ea79-94a8-42e3-8986-dddbec83d755-utilities\") pod \"0368ea79-94a8-42e3-8986-dddbec83d755\" (UID: \"0368ea79-94a8-42e3-8986-dddbec83d755\") " Jan 27 07:55:16 crc kubenswrapper[4787]: I0127 07:55:16.834057 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0368ea79-94a8-42e3-8986-dddbec83d755-utilities" (OuterVolumeSpecName: "utilities") pod "0368ea79-94a8-42e3-8986-dddbec83d755" (UID: "0368ea79-94a8-42e3-8986-dddbec83d755"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 27 07:55:16 crc kubenswrapper[4787]: I0127 07:55:16.841815 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0368ea79-94a8-42e3-8986-dddbec83d755-kube-api-access-z5r7l" (OuterVolumeSpecName: "kube-api-access-z5r7l") pod "0368ea79-94a8-42e3-8986-dddbec83d755" (UID: "0368ea79-94a8-42e3-8986-dddbec83d755"). InnerVolumeSpecName "kube-api-access-z5r7l". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 27 07:55:16 crc kubenswrapper[4787]: I0127 07:55:16.891748 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0368ea79-94a8-42e3-8986-dddbec83d755-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "0368ea79-94a8-42e3-8986-dddbec83d755" (UID: "0368ea79-94a8-42e3-8986-dddbec83d755"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 27 07:55:16 crc kubenswrapper[4787]: I0127 07:55:16.895802 4787 generic.go:334] "Generic (PLEG): container finished" podID="0368ea79-94a8-42e3-8986-dddbec83d755" containerID="40ccd79f42857c9cc5f59993f886e0ed52b8d538b0a585d795f0fbaf3b3c58f2" exitCode=0 Jan 27 07:55:16 crc kubenswrapper[4787]: I0127 07:55:16.895931 4787 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-cgwgp" Jan 27 07:55:16 crc kubenswrapper[4787]: I0127 07:55:16.895928 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-cgwgp" event={"ID":"0368ea79-94a8-42e3-8986-dddbec83d755","Type":"ContainerDied","Data":"40ccd79f42857c9cc5f59993f886e0ed52b8d538b0a585d795f0fbaf3b3c58f2"} Jan 27 07:55:16 crc kubenswrapper[4787]: I0127 07:55:16.896024 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-cgwgp" event={"ID":"0368ea79-94a8-42e3-8986-dddbec83d755","Type":"ContainerDied","Data":"7a180c063d8433886b9b99fe8382a8d973c8b6b4d1425416261139533b679d98"} Jan 27 07:55:16 crc kubenswrapper[4787]: I0127 07:55:16.896073 4787 scope.go:117] "RemoveContainer" containerID="40ccd79f42857c9cc5f59993f886e0ed52b8d538b0a585d795f0fbaf3b3c58f2" Jan 27 07:55:16 crc kubenswrapper[4787]: I0127 07:55:16.924660 4787 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-cgwgp"] Jan 27 07:55:16 crc kubenswrapper[4787]: I0127 07:55:16.924774 4787 scope.go:117] "RemoveContainer" containerID="4c62d29717da4e2dafd29f1d1052ba2a291d903a0149abf52208f5ae61b47bc9" Jan 27 07:55:16 crc kubenswrapper[4787]: I0127 07:55:16.927020 4787 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-cgwgp"] Jan 27 07:55:16 crc kubenswrapper[4787]: I0127 07:55:16.934180 4787 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0368ea79-94a8-42e3-8986-dddbec83d755-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 27 07:55:16 crc kubenswrapper[4787]: I0127 07:55:16.934209 4787 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0368ea79-94a8-42e3-8986-dddbec83d755-utilities\") on node \"crc\" DevicePath \"\"" Jan 27 07:55:16 crc kubenswrapper[4787]: I0127 07:55:16.934219 4787 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-z5r7l\" (UniqueName: \"kubernetes.io/projected/0368ea79-94a8-42e3-8986-dddbec83d755-kube-api-access-z5r7l\") on node \"crc\" DevicePath \"\"" Jan 27 07:55:16 crc kubenswrapper[4787]: I0127 07:55:16.951758 4787 scope.go:117] "RemoveContainer" containerID="da0792067324438fea958c400a5fb011057b3895c2b4ec11d5379beb14d61d0d" Jan 27 07:55:16 crc kubenswrapper[4787]: I0127 07:55:16.971060 4787 scope.go:117] "RemoveContainer" containerID="40ccd79f42857c9cc5f59993f886e0ed52b8d538b0a585d795f0fbaf3b3c58f2" Jan 27 07:55:16 crc kubenswrapper[4787]: E0127 07:55:16.971514 4787 log.go:32] 
"ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"40ccd79f42857c9cc5f59993f886e0ed52b8d538b0a585d795f0fbaf3b3c58f2\": container with ID starting with 40ccd79f42857c9cc5f59993f886e0ed52b8d538b0a585d795f0fbaf3b3c58f2 not found: ID does not exist" containerID="40ccd79f42857c9cc5f59993f886e0ed52b8d538b0a585d795f0fbaf3b3c58f2" Jan 27 07:55:16 crc kubenswrapper[4787]: I0127 07:55:16.971588 4787 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"40ccd79f42857c9cc5f59993f886e0ed52b8d538b0a585d795f0fbaf3b3c58f2"} err="failed to get container status \"40ccd79f42857c9cc5f59993f886e0ed52b8d538b0a585d795f0fbaf3b3c58f2\": rpc error: code = NotFound desc = could not find container \"40ccd79f42857c9cc5f59993f886e0ed52b8d538b0a585d795f0fbaf3b3c58f2\": container with ID starting with 40ccd79f42857c9cc5f59993f886e0ed52b8d538b0a585d795f0fbaf3b3c58f2 not found: ID does not exist" Jan 27 07:55:16 crc kubenswrapper[4787]: I0127 07:55:16.971613 4787 scope.go:117] "RemoveContainer" containerID="4c62d29717da4e2dafd29f1d1052ba2a291d903a0149abf52208f5ae61b47bc9" Jan 27 07:55:16 crc kubenswrapper[4787]: E0127 07:55:16.972100 4787 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4c62d29717da4e2dafd29f1d1052ba2a291d903a0149abf52208f5ae61b47bc9\": container with ID starting with 4c62d29717da4e2dafd29f1d1052ba2a291d903a0149abf52208f5ae61b47bc9 not found: ID does not exist" containerID="4c62d29717da4e2dafd29f1d1052ba2a291d903a0149abf52208f5ae61b47bc9" Jan 27 07:55:16 crc kubenswrapper[4787]: I0127 07:55:16.972126 4787 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4c62d29717da4e2dafd29f1d1052ba2a291d903a0149abf52208f5ae61b47bc9"} err="failed to get container status \"4c62d29717da4e2dafd29f1d1052ba2a291d903a0149abf52208f5ae61b47bc9\": rpc error: code = NotFound desc = could not find container \"4c62d29717da4e2dafd29f1d1052ba2a291d903a0149abf52208f5ae61b47bc9\": container with ID starting with 4c62d29717da4e2dafd29f1d1052ba2a291d903a0149abf52208f5ae61b47bc9 not found: ID does not exist" Jan 27 07:55:16 crc kubenswrapper[4787]: I0127 07:55:16.972143 4787 scope.go:117] "RemoveContainer" containerID="da0792067324438fea958c400a5fb011057b3895c2b4ec11d5379beb14d61d0d" Jan 27 07:55:16 crc kubenswrapper[4787]: E0127 07:55:16.972456 4787 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"da0792067324438fea958c400a5fb011057b3895c2b4ec11d5379beb14d61d0d\": container with ID starting with da0792067324438fea958c400a5fb011057b3895c2b4ec11d5379beb14d61d0d not found: ID does not exist" containerID="da0792067324438fea958c400a5fb011057b3895c2b4ec11d5379beb14d61d0d" Jan 27 07:55:16 crc kubenswrapper[4787]: I0127 07:55:16.972479 4787 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"da0792067324438fea958c400a5fb011057b3895c2b4ec11d5379beb14d61d0d"} err="failed to get container status \"da0792067324438fea958c400a5fb011057b3895c2b4ec11d5379beb14d61d0d\": rpc error: code = NotFound desc = could not find container \"da0792067324438fea958c400a5fb011057b3895c2b4ec11d5379beb14d61d0d\": container with ID starting with da0792067324438fea958c400a5fb011057b3895c2b4ec11d5379beb14d61d0d not found: ID does not exist" Jan 27 07:55:17 crc kubenswrapper[4787]: I0127 07:55:17.086991 4787 kubelet_volumes.go:163] "Cleaned 
up orphaned pod volumes dir" podUID="0368ea79-94a8-42e3-8986-dddbec83d755" path="/var/lib/kubelet/pods/0368ea79-94a8-42e3-8986-dddbec83d755/volumes" Jan 27 07:55:17 crc kubenswrapper[4787]: I0127 07:55:17.881232 4787 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-kkjnl" Jan 27 07:55:17 crc kubenswrapper[4787]: I0127 07:55:17.927619 4787 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-kkjnl" Jan 27 07:55:22 crc kubenswrapper[4787]: I0127 07:55:22.447204 4787 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-777857c966-2h66n"] Jan 27 07:55:22 crc kubenswrapper[4787]: I0127 07:55:22.447920 4787 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-777857c966-2h66n" podUID="816883bf-afa6-47b8-b823-7603ebf5cc71" containerName="controller-manager" containerID="cri-o://5f1d64b82d7b846232716736a494d6727d50dc1adacd62ef1ab1de361e5c3498" gracePeriod=30 Jan 27 07:55:22 crc kubenswrapper[4787]: I0127 07:55:22.531973 4787 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-659f59795d-4hwcr"] Jan 27 07:55:22 crc kubenswrapper[4787]: I0127 07:55:22.532269 4787 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-659f59795d-4hwcr" podUID="17fe0585-ad09-4585-9b4c-01698e2c295b" containerName="route-controller-manager" containerID="cri-o://dd74d0a47c882efd924ba6fb867cda61c83e9bb61f03bbd8c3f4c56cbfb22222" gracePeriod=30 Jan 27 07:55:22 crc kubenswrapper[4787]: I0127 07:55:22.823485 4787 patch_prober.go:28] interesting pod/machine-config-daemon-q4fh5 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 27 07:55:22 crc kubenswrapper[4787]: I0127 07:55:22.823907 4787 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-q4fh5" podUID="f051e184-acac-47cf-9e04-9df648288715" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 27 07:55:22 crc kubenswrapper[4787]: I0127 07:55:22.823984 4787 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-q4fh5" Jan 27 07:55:22 crc kubenswrapper[4787]: I0127 07:55:22.824879 4787 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"8829867d627dfed47b988a93a9ac3e381c0bf59794b65f8e1c1e821838477748"} pod="openshift-machine-config-operator/machine-config-daemon-q4fh5" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 27 07:55:22 crc kubenswrapper[4787]: I0127 07:55:22.824951 4787 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-q4fh5" podUID="f051e184-acac-47cf-9e04-9df648288715" containerName="machine-config-daemon" containerID="cri-o://8829867d627dfed47b988a93a9ac3e381c0bf59794b65f8e1c1e821838477748" gracePeriod=600 Jan 27 07:55:22 crc kubenswrapper[4787]: I0127 
07:55:22.954648 4787 generic.go:334] "Generic (PLEG): container finished" podID="17fe0585-ad09-4585-9b4c-01698e2c295b" containerID="dd74d0a47c882efd924ba6fb867cda61c83e9bb61f03bbd8c3f4c56cbfb22222" exitCode=0 Jan 27 07:55:22 crc kubenswrapper[4787]: I0127 07:55:22.954745 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-659f59795d-4hwcr" event={"ID":"17fe0585-ad09-4585-9b4c-01698e2c295b","Type":"ContainerDied","Data":"dd74d0a47c882efd924ba6fb867cda61c83e9bb61f03bbd8c3f4c56cbfb22222"} Jan 27 07:55:22 crc kubenswrapper[4787]: I0127 07:55:22.957725 4787 generic.go:334] "Generic (PLEG): container finished" podID="816883bf-afa6-47b8-b823-7603ebf5cc71" containerID="5f1d64b82d7b846232716736a494d6727d50dc1adacd62ef1ab1de361e5c3498" exitCode=0 Jan 27 07:55:22 crc kubenswrapper[4787]: I0127 07:55:22.957795 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-777857c966-2h66n" event={"ID":"816883bf-afa6-47b8-b823-7603ebf5cc71","Type":"ContainerDied","Data":"5f1d64b82d7b846232716736a494d6727d50dc1adacd62ef1ab1de361e5c3498"} Jan 27 07:55:22 crc kubenswrapper[4787]: I0127 07:55:22.960198 4787 generic.go:334] "Generic (PLEG): container finished" podID="f051e184-acac-47cf-9e04-9df648288715" containerID="8829867d627dfed47b988a93a9ac3e381c0bf59794b65f8e1c1e821838477748" exitCode=0 Jan 27 07:55:22 crc kubenswrapper[4787]: I0127 07:55:22.960230 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-q4fh5" event={"ID":"f051e184-acac-47cf-9e04-9df648288715","Type":"ContainerDied","Data":"8829867d627dfed47b988a93a9ac3e381c0bf59794b65f8e1c1e821838477748"} Jan 27 07:55:23 crc kubenswrapper[4787]: I0127 07:55:23.027867 4787 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-659f59795d-4hwcr" Jan 27 07:55:23 crc kubenswrapper[4787]: I0127 07:55:23.119945 4787 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-777857c966-2h66n" Jan 27 07:55:23 crc kubenswrapper[4787]: I0127 07:55:23.136847 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/17fe0585-ad09-4585-9b4c-01698e2c295b-config\") pod \"17fe0585-ad09-4585-9b4c-01698e2c295b\" (UID: \"17fe0585-ad09-4585-9b4c-01698e2c295b\") " Jan 27 07:55:23 crc kubenswrapper[4787]: I0127 07:55:23.137262 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/17fe0585-ad09-4585-9b4c-01698e2c295b-serving-cert\") pod \"17fe0585-ad09-4585-9b4c-01698e2c295b\" (UID: \"17fe0585-ad09-4585-9b4c-01698e2c295b\") " Jan 27 07:55:23 crc kubenswrapper[4787]: I0127 07:55:23.138715 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9b5xw\" (UniqueName: \"kubernetes.io/projected/17fe0585-ad09-4585-9b4c-01698e2c295b-kube-api-access-9b5xw\") pod \"17fe0585-ad09-4585-9b4c-01698e2c295b\" (UID: \"17fe0585-ad09-4585-9b4c-01698e2c295b\") " Jan 27 07:55:23 crc kubenswrapper[4787]: I0127 07:55:23.138840 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/17fe0585-ad09-4585-9b4c-01698e2c295b-client-ca\") pod \"17fe0585-ad09-4585-9b4c-01698e2c295b\" (UID: \"17fe0585-ad09-4585-9b4c-01698e2c295b\") " Jan 27 07:55:23 crc kubenswrapper[4787]: I0127 07:55:23.138058 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/17fe0585-ad09-4585-9b4c-01698e2c295b-config" (OuterVolumeSpecName: "config") pod "17fe0585-ad09-4585-9b4c-01698e2c295b" (UID: "17fe0585-ad09-4585-9b4c-01698e2c295b"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 27 07:55:23 crc kubenswrapper[4787]: I0127 07:55:23.139538 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/17fe0585-ad09-4585-9b4c-01698e2c295b-client-ca" (OuterVolumeSpecName: "client-ca") pod "17fe0585-ad09-4585-9b4c-01698e2c295b" (UID: "17fe0585-ad09-4585-9b4c-01698e2c295b"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 27 07:55:23 crc kubenswrapper[4787]: I0127 07:55:23.145793 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/17fe0585-ad09-4585-9b4c-01698e2c295b-kube-api-access-9b5xw" (OuterVolumeSpecName: "kube-api-access-9b5xw") pod "17fe0585-ad09-4585-9b4c-01698e2c295b" (UID: "17fe0585-ad09-4585-9b4c-01698e2c295b"). InnerVolumeSpecName "kube-api-access-9b5xw". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 27 07:55:23 crc kubenswrapper[4787]: I0127 07:55:23.146013 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/17fe0585-ad09-4585-9b4c-01698e2c295b-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "17fe0585-ad09-4585-9b4c-01698e2c295b" (UID: "17fe0585-ad09-4585-9b4c-01698e2c295b"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 27 07:55:23 crc kubenswrapper[4787]: I0127 07:55:23.240102 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/816883bf-afa6-47b8-b823-7603ebf5cc71-serving-cert\") pod \"816883bf-afa6-47b8-b823-7603ebf5cc71\" (UID: \"816883bf-afa6-47b8-b823-7603ebf5cc71\") " Jan 27 07:55:23 crc kubenswrapper[4787]: I0127 07:55:23.240192 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/816883bf-afa6-47b8-b823-7603ebf5cc71-client-ca\") pod \"816883bf-afa6-47b8-b823-7603ebf5cc71\" (UID: \"816883bf-afa6-47b8-b823-7603ebf5cc71\") " Jan 27 07:55:23 crc kubenswrapper[4787]: I0127 07:55:23.240235 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/816883bf-afa6-47b8-b823-7603ebf5cc71-config\") pod \"816883bf-afa6-47b8-b823-7603ebf5cc71\" (UID: \"816883bf-afa6-47b8-b823-7603ebf5cc71\") " Jan 27 07:55:23 crc kubenswrapper[4787]: I0127 07:55:23.240266 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/816883bf-afa6-47b8-b823-7603ebf5cc71-proxy-ca-bundles\") pod \"816883bf-afa6-47b8-b823-7603ebf5cc71\" (UID: \"816883bf-afa6-47b8-b823-7603ebf5cc71\") " Jan 27 07:55:23 crc kubenswrapper[4787]: I0127 07:55:23.240308 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s7bl5\" (UniqueName: \"kubernetes.io/projected/816883bf-afa6-47b8-b823-7603ebf5cc71-kube-api-access-s7bl5\") pod \"816883bf-afa6-47b8-b823-7603ebf5cc71\" (UID: \"816883bf-afa6-47b8-b823-7603ebf5cc71\") " Jan 27 07:55:23 crc kubenswrapper[4787]: I0127 07:55:23.241296 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/816883bf-afa6-47b8-b823-7603ebf5cc71-client-ca" (OuterVolumeSpecName: "client-ca") pod "816883bf-afa6-47b8-b823-7603ebf5cc71" (UID: "816883bf-afa6-47b8-b823-7603ebf5cc71"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 27 07:55:23 crc kubenswrapper[4787]: I0127 07:55:23.241392 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/816883bf-afa6-47b8-b823-7603ebf5cc71-config" (OuterVolumeSpecName: "config") pod "816883bf-afa6-47b8-b823-7603ebf5cc71" (UID: "816883bf-afa6-47b8-b823-7603ebf5cc71"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 27 07:55:23 crc kubenswrapper[4787]: I0127 07:55:23.241423 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/816883bf-afa6-47b8-b823-7603ebf5cc71-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "816883bf-afa6-47b8-b823-7603ebf5cc71" (UID: "816883bf-afa6-47b8-b823-7603ebf5cc71"). InnerVolumeSpecName "proxy-ca-bundles". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 27 07:55:23 crc kubenswrapper[4787]: I0127 07:55:23.241753 4787 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/816883bf-afa6-47b8-b823-7603ebf5cc71-config\") on node \"crc\" DevicePath \"\"" Jan 27 07:55:23 crc kubenswrapper[4787]: I0127 07:55:23.241788 4787 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9b5xw\" (UniqueName: \"kubernetes.io/projected/17fe0585-ad09-4585-9b4c-01698e2c295b-kube-api-access-9b5xw\") on node \"crc\" DevicePath \"\"" Jan 27 07:55:23 crc kubenswrapper[4787]: I0127 07:55:23.241807 4787 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/816883bf-afa6-47b8-b823-7603ebf5cc71-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Jan 27 07:55:23 crc kubenswrapper[4787]: I0127 07:55:23.241822 4787 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/17fe0585-ad09-4585-9b4c-01698e2c295b-client-ca\") on node \"crc\" DevicePath \"\"" Jan 27 07:55:23 crc kubenswrapper[4787]: I0127 07:55:23.241836 4787 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/17fe0585-ad09-4585-9b4c-01698e2c295b-config\") on node \"crc\" DevicePath \"\"" Jan 27 07:55:23 crc kubenswrapper[4787]: I0127 07:55:23.241847 4787 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/17fe0585-ad09-4585-9b4c-01698e2c295b-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 27 07:55:23 crc kubenswrapper[4787]: I0127 07:55:23.241859 4787 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/816883bf-afa6-47b8-b823-7603ebf5cc71-client-ca\") on node \"crc\" DevicePath \"\"" Jan 27 07:55:23 crc kubenswrapper[4787]: I0127 07:55:23.246184 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/816883bf-afa6-47b8-b823-7603ebf5cc71-kube-api-access-s7bl5" (OuterVolumeSpecName: "kube-api-access-s7bl5") pod "816883bf-afa6-47b8-b823-7603ebf5cc71" (UID: "816883bf-afa6-47b8-b823-7603ebf5cc71"). InnerVolumeSpecName "kube-api-access-s7bl5". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 27 07:55:23 crc kubenswrapper[4787]: I0127 07:55:23.246639 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/816883bf-afa6-47b8-b823-7603ebf5cc71-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "816883bf-afa6-47b8-b823-7603ebf5cc71" (UID: "816883bf-afa6-47b8-b823-7603ebf5cc71"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 27 07:55:23 crc kubenswrapper[4787]: I0127 07:55:23.343168 4787 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s7bl5\" (UniqueName: \"kubernetes.io/projected/816883bf-afa6-47b8-b823-7603ebf5cc71-kube-api-access-s7bl5\") on node \"crc\" DevicePath \"\"" Jan 27 07:55:23 crc kubenswrapper[4787]: I0127 07:55:23.343635 4787 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/816883bf-afa6-47b8-b823-7603ebf5cc71-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 27 07:55:23 crc kubenswrapper[4787]: I0127 07:55:23.847697 4787 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-7f8f44c5c4-qt2tm"] Jan 27 07:55:23 crc kubenswrapper[4787]: E0127 07:55:23.848088 4787 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0368ea79-94a8-42e3-8986-dddbec83d755" containerName="extract-content" Jan 27 07:55:23 crc kubenswrapper[4787]: I0127 07:55:23.848109 4787 state_mem.go:107] "Deleted CPUSet assignment" podUID="0368ea79-94a8-42e3-8986-dddbec83d755" containerName="extract-content" Jan 27 07:55:23 crc kubenswrapper[4787]: E0127 07:55:23.848127 4787 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="17fe0585-ad09-4585-9b4c-01698e2c295b" containerName="route-controller-manager" Jan 27 07:55:23 crc kubenswrapper[4787]: I0127 07:55:23.848136 4787 state_mem.go:107] "Deleted CPUSet assignment" podUID="17fe0585-ad09-4585-9b4c-01698e2c295b" containerName="route-controller-manager" Jan 27 07:55:23 crc kubenswrapper[4787]: E0127 07:55:23.848161 4787 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="816883bf-afa6-47b8-b823-7603ebf5cc71" containerName="controller-manager" Jan 27 07:55:23 crc kubenswrapper[4787]: I0127 07:55:23.848170 4787 state_mem.go:107] "Deleted CPUSet assignment" podUID="816883bf-afa6-47b8-b823-7603ebf5cc71" containerName="controller-manager" Jan 27 07:55:23 crc kubenswrapper[4787]: E0127 07:55:23.848183 4787 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0368ea79-94a8-42e3-8986-dddbec83d755" containerName="extract-utilities" Jan 27 07:55:23 crc kubenswrapper[4787]: I0127 07:55:23.848192 4787 state_mem.go:107] "Deleted CPUSet assignment" podUID="0368ea79-94a8-42e3-8986-dddbec83d755" containerName="extract-utilities" Jan 27 07:55:23 crc kubenswrapper[4787]: E0127 07:55:23.848202 4787 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0368ea79-94a8-42e3-8986-dddbec83d755" containerName="registry-server" Jan 27 07:55:23 crc kubenswrapper[4787]: I0127 07:55:23.848211 4787 state_mem.go:107] "Deleted CPUSet assignment" podUID="0368ea79-94a8-42e3-8986-dddbec83d755" containerName="registry-server" Jan 27 07:55:23 crc kubenswrapper[4787]: I0127 07:55:23.848330 4787 memory_manager.go:354] "RemoveStaleState removing state" podUID="0368ea79-94a8-42e3-8986-dddbec83d755" containerName="registry-server" Jan 27 07:55:23 crc kubenswrapper[4787]: I0127 07:55:23.848351 4787 memory_manager.go:354] "RemoveStaleState removing state" podUID="816883bf-afa6-47b8-b823-7603ebf5cc71" containerName="controller-manager" Jan 27 07:55:23 crc kubenswrapper[4787]: I0127 07:55:23.848364 4787 memory_manager.go:354] "RemoveStaleState removing state" podUID="17fe0585-ad09-4585-9b4c-01698e2c295b" containerName="route-controller-manager" Jan 27 07:55:23 crc kubenswrapper[4787]: I0127 07:55:23.848997 4787 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-7f8f44c5c4-qt2tm" Jan 27 07:55:23 crc kubenswrapper[4787]: I0127 07:55:23.855859 4787 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-77c8d84cd6-dnssw"] Jan 27 07:55:23 crc kubenswrapper[4787]: I0127 07:55:23.857908 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-77c8d84cd6-dnssw" Jan 27 07:55:23 crc kubenswrapper[4787]: I0127 07:55:23.864824 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-77c8d84cd6-dnssw"] Jan 27 07:55:23 crc kubenswrapper[4787]: I0127 07:55:23.868793 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-7f8f44c5c4-qt2tm"] Jan 27 07:55:23 crc kubenswrapper[4787]: I0127 07:55:23.950767 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gv7tn\" (UniqueName: \"kubernetes.io/projected/50febbec-2599-4f05-8482-938e97baa480-kube-api-access-gv7tn\") pod \"route-controller-manager-77c8d84cd6-dnssw\" (UID: \"50febbec-2599-4f05-8482-938e97baa480\") " pod="openshift-route-controller-manager/route-controller-manager-77c8d84cd6-dnssw" Jan 27 07:55:23 crc kubenswrapper[4787]: I0127 07:55:23.950887 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/50febbec-2599-4f05-8482-938e97baa480-serving-cert\") pod \"route-controller-manager-77c8d84cd6-dnssw\" (UID: \"50febbec-2599-4f05-8482-938e97baa480\") " pod="openshift-route-controller-manager/route-controller-manager-77c8d84cd6-dnssw" Jan 27 07:55:23 crc kubenswrapper[4787]: I0127 07:55:23.950915 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/bcf43325-3dcd-41bf-aa3f-8741d7cb20f7-proxy-ca-bundles\") pod \"controller-manager-7f8f44c5c4-qt2tm\" (UID: \"bcf43325-3dcd-41bf-aa3f-8741d7cb20f7\") " pod="openshift-controller-manager/controller-manager-7f8f44c5c4-qt2tm" Jan 27 07:55:23 crc kubenswrapper[4787]: I0127 07:55:23.950939 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/50febbec-2599-4f05-8482-938e97baa480-client-ca\") pod \"route-controller-manager-77c8d84cd6-dnssw\" (UID: \"50febbec-2599-4f05-8482-938e97baa480\") " pod="openshift-route-controller-manager/route-controller-manager-77c8d84cd6-dnssw" Jan 27 07:55:23 crc kubenswrapper[4787]: I0127 07:55:23.950958 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6wdxf\" (UniqueName: \"kubernetes.io/projected/bcf43325-3dcd-41bf-aa3f-8741d7cb20f7-kube-api-access-6wdxf\") pod \"controller-manager-7f8f44c5c4-qt2tm\" (UID: \"bcf43325-3dcd-41bf-aa3f-8741d7cb20f7\") " pod="openshift-controller-manager/controller-manager-7f8f44c5c4-qt2tm" Jan 27 07:55:23 crc kubenswrapper[4787]: I0127 07:55:23.951053 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/50febbec-2599-4f05-8482-938e97baa480-config\") pod \"route-controller-manager-77c8d84cd6-dnssw\" (UID: \"50febbec-2599-4f05-8482-938e97baa480\") " 
pod="openshift-route-controller-manager/route-controller-manager-77c8d84cd6-dnssw" Jan 27 07:55:23 crc kubenswrapper[4787]: I0127 07:55:23.951117 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/bcf43325-3dcd-41bf-aa3f-8741d7cb20f7-client-ca\") pod \"controller-manager-7f8f44c5c4-qt2tm\" (UID: \"bcf43325-3dcd-41bf-aa3f-8741d7cb20f7\") " pod="openshift-controller-manager/controller-manager-7f8f44c5c4-qt2tm" Jan 27 07:55:23 crc kubenswrapper[4787]: I0127 07:55:23.951168 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bcf43325-3dcd-41bf-aa3f-8741d7cb20f7-config\") pod \"controller-manager-7f8f44c5c4-qt2tm\" (UID: \"bcf43325-3dcd-41bf-aa3f-8741d7cb20f7\") " pod="openshift-controller-manager/controller-manager-7f8f44c5c4-qt2tm" Jan 27 07:55:23 crc kubenswrapper[4787]: I0127 07:55:23.951202 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bcf43325-3dcd-41bf-aa3f-8741d7cb20f7-serving-cert\") pod \"controller-manager-7f8f44c5c4-qt2tm\" (UID: \"bcf43325-3dcd-41bf-aa3f-8741d7cb20f7\") " pod="openshift-controller-manager/controller-manager-7f8f44c5c4-qt2tm" Jan 27 07:55:23 crc kubenswrapper[4787]: I0127 07:55:23.969071 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-659f59795d-4hwcr" event={"ID":"17fe0585-ad09-4585-9b4c-01698e2c295b","Type":"ContainerDied","Data":"5d2b198f58bbf5e2fc1127f374689f197db99d090e6ffaf6f08fba9b1669dc76"} Jan 27 07:55:23 crc kubenswrapper[4787]: I0127 07:55:23.969115 4787 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-659f59795d-4hwcr" Jan 27 07:55:23 crc kubenswrapper[4787]: I0127 07:55:23.969154 4787 scope.go:117] "RemoveContainer" containerID="dd74d0a47c882efd924ba6fb867cda61c83e9bb61f03bbd8c3f4c56cbfb22222" Jan 27 07:55:23 crc kubenswrapper[4787]: I0127 07:55:23.973177 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-777857c966-2h66n" event={"ID":"816883bf-afa6-47b8-b823-7603ebf5cc71","Type":"ContainerDied","Data":"c09f056e6b522f051bebfcf76d79e3302d7058b741907e39732f827b109cc806"} Jan 27 07:55:23 crc kubenswrapper[4787]: I0127 07:55:23.973250 4787 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-777857c966-2h66n" Jan 27 07:55:23 crc kubenswrapper[4787]: I0127 07:55:23.977225 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-q4fh5" event={"ID":"f051e184-acac-47cf-9e04-9df648288715","Type":"ContainerStarted","Data":"ef7962ef772271e4959fec84d91ffb697a93484f02f3f1aa59cfda3d789d7c84"} Jan 27 07:55:24 crc kubenswrapper[4787]: I0127 07:55:24.004812 4787 scope.go:117] "RemoveContainer" containerID="5f1d64b82d7b846232716736a494d6727d50dc1adacd62ef1ab1de361e5c3498" Jan 27 07:55:24 crc kubenswrapper[4787]: I0127 07:55:24.018390 4787 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-659f59795d-4hwcr"] Jan 27 07:55:24 crc kubenswrapper[4787]: I0127 07:55:24.024046 4787 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-659f59795d-4hwcr"] Jan 27 07:55:24 crc kubenswrapper[4787]: I0127 07:55:24.031169 4787 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-777857c966-2h66n"] Jan 27 07:55:24 crc kubenswrapper[4787]: I0127 07:55:24.034620 4787 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-777857c966-2h66n"] Jan 27 07:55:24 crc kubenswrapper[4787]: I0127 07:55:24.053243 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/50febbec-2599-4f05-8482-938e97baa480-config\") pod \"route-controller-manager-77c8d84cd6-dnssw\" (UID: \"50febbec-2599-4f05-8482-938e97baa480\") " pod="openshift-route-controller-manager/route-controller-manager-77c8d84cd6-dnssw" Jan 27 07:55:24 crc kubenswrapper[4787]: I0127 07:55:24.053337 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/bcf43325-3dcd-41bf-aa3f-8741d7cb20f7-client-ca\") pod \"controller-manager-7f8f44c5c4-qt2tm\" (UID: \"bcf43325-3dcd-41bf-aa3f-8741d7cb20f7\") " pod="openshift-controller-manager/controller-manager-7f8f44c5c4-qt2tm" Jan 27 07:55:24 crc kubenswrapper[4787]: I0127 07:55:24.053390 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bcf43325-3dcd-41bf-aa3f-8741d7cb20f7-config\") pod \"controller-manager-7f8f44c5c4-qt2tm\" (UID: \"bcf43325-3dcd-41bf-aa3f-8741d7cb20f7\") " pod="openshift-controller-manager/controller-manager-7f8f44c5c4-qt2tm" Jan 27 07:55:24 crc kubenswrapper[4787]: I0127 07:55:24.053428 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bcf43325-3dcd-41bf-aa3f-8741d7cb20f7-serving-cert\") pod \"controller-manager-7f8f44c5c4-qt2tm\" (UID: \"bcf43325-3dcd-41bf-aa3f-8741d7cb20f7\") " pod="openshift-controller-manager/controller-manager-7f8f44c5c4-qt2tm" Jan 27 07:55:24 crc kubenswrapper[4787]: I0127 07:55:24.053459 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gv7tn\" (UniqueName: \"kubernetes.io/projected/50febbec-2599-4f05-8482-938e97baa480-kube-api-access-gv7tn\") pod \"route-controller-manager-77c8d84cd6-dnssw\" (UID: \"50febbec-2599-4f05-8482-938e97baa480\") " pod="openshift-route-controller-manager/route-controller-manager-77c8d84cd6-dnssw" Jan 27 07:55:24 crc kubenswrapper[4787]: I0127 
07:55:24.053564 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/50febbec-2599-4f05-8482-938e97baa480-serving-cert\") pod \"route-controller-manager-77c8d84cd6-dnssw\" (UID: \"50febbec-2599-4f05-8482-938e97baa480\") " pod="openshift-route-controller-manager/route-controller-manager-77c8d84cd6-dnssw" Jan 27 07:55:24 crc kubenswrapper[4787]: I0127 07:55:24.053591 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/bcf43325-3dcd-41bf-aa3f-8741d7cb20f7-proxy-ca-bundles\") pod \"controller-manager-7f8f44c5c4-qt2tm\" (UID: \"bcf43325-3dcd-41bf-aa3f-8741d7cb20f7\") " pod="openshift-controller-manager/controller-manager-7f8f44c5c4-qt2tm" Jan 27 07:55:24 crc kubenswrapper[4787]: I0127 07:55:24.053610 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/50febbec-2599-4f05-8482-938e97baa480-client-ca\") pod \"route-controller-manager-77c8d84cd6-dnssw\" (UID: \"50febbec-2599-4f05-8482-938e97baa480\") " pod="openshift-route-controller-manager/route-controller-manager-77c8d84cd6-dnssw" Jan 27 07:55:24 crc kubenswrapper[4787]: I0127 07:55:24.053629 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6wdxf\" (UniqueName: \"kubernetes.io/projected/bcf43325-3dcd-41bf-aa3f-8741d7cb20f7-kube-api-access-6wdxf\") pod \"controller-manager-7f8f44c5c4-qt2tm\" (UID: \"bcf43325-3dcd-41bf-aa3f-8741d7cb20f7\") " pod="openshift-controller-manager/controller-manager-7f8f44c5c4-qt2tm" Jan 27 07:55:24 crc kubenswrapper[4787]: I0127 07:55:24.057017 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/50febbec-2599-4f05-8482-938e97baa480-config\") pod \"route-controller-manager-77c8d84cd6-dnssw\" (UID: \"50febbec-2599-4f05-8482-938e97baa480\") " pod="openshift-route-controller-manager/route-controller-manager-77c8d84cd6-dnssw" Jan 27 07:55:24 crc kubenswrapper[4787]: I0127 07:55:24.054693 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/50febbec-2599-4f05-8482-938e97baa480-client-ca\") pod \"route-controller-manager-77c8d84cd6-dnssw\" (UID: \"50febbec-2599-4f05-8482-938e97baa480\") " pod="openshift-route-controller-manager/route-controller-manager-77c8d84cd6-dnssw" Jan 27 07:55:24 crc kubenswrapper[4787]: I0127 07:55:24.058313 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bcf43325-3dcd-41bf-aa3f-8741d7cb20f7-config\") pod \"controller-manager-7f8f44c5c4-qt2tm\" (UID: \"bcf43325-3dcd-41bf-aa3f-8741d7cb20f7\") " pod="openshift-controller-manager/controller-manager-7f8f44c5c4-qt2tm" Jan 27 07:55:24 crc kubenswrapper[4787]: I0127 07:55:24.059203 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/bcf43325-3dcd-41bf-aa3f-8741d7cb20f7-client-ca\") pod \"controller-manager-7f8f44c5c4-qt2tm\" (UID: \"bcf43325-3dcd-41bf-aa3f-8741d7cb20f7\") " pod="openshift-controller-manager/controller-manager-7f8f44c5c4-qt2tm" Jan 27 07:55:24 crc kubenswrapper[4787]: I0127 07:55:24.059873 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/50febbec-2599-4f05-8482-938e97baa480-serving-cert\") pod \"route-controller-manager-77c8d84cd6-dnssw\" (UID: \"50febbec-2599-4f05-8482-938e97baa480\") " pod="openshift-route-controller-manager/route-controller-manager-77c8d84cd6-dnssw" Jan 27 07:55:24 crc kubenswrapper[4787]: I0127 07:55:24.066356 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bcf43325-3dcd-41bf-aa3f-8741d7cb20f7-serving-cert\") pod \"controller-manager-7f8f44c5c4-qt2tm\" (UID: \"bcf43325-3dcd-41bf-aa3f-8741d7cb20f7\") " pod="openshift-controller-manager/controller-manager-7f8f44c5c4-qt2tm" Jan 27 07:55:24 crc kubenswrapper[4787]: I0127 07:55:24.071192 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gv7tn\" (UniqueName: \"kubernetes.io/projected/50febbec-2599-4f05-8482-938e97baa480-kube-api-access-gv7tn\") pod \"route-controller-manager-77c8d84cd6-dnssw\" (UID: \"50febbec-2599-4f05-8482-938e97baa480\") " pod="openshift-route-controller-manager/route-controller-manager-77c8d84cd6-dnssw" Jan 27 07:55:24 crc kubenswrapper[4787]: I0127 07:55:24.072758 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/bcf43325-3dcd-41bf-aa3f-8741d7cb20f7-proxy-ca-bundles\") pod \"controller-manager-7f8f44c5c4-qt2tm\" (UID: \"bcf43325-3dcd-41bf-aa3f-8741d7cb20f7\") " pod="openshift-controller-manager/controller-manager-7f8f44c5c4-qt2tm" Jan 27 07:55:24 crc kubenswrapper[4787]: I0127 07:55:24.076335 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6wdxf\" (UniqueName: \"kubernetes.io/projected/bcf43325-3dcd-41bf-aa3f-8741d7cb20f7-kube-api-access-6wdxf\") pod \"controller-manager-7f8f44c5c4-qt2tm\" (UID: \"bcf43325-3dcd-41bf-aa3f-8741d7cb20f7\") " pod="openshift-controller-manager/controller-manager-7f8f44c5c4-qt2tm" Jan 27 07:55:24 crc kubenswrapper[4787]: I0127 07:55:24.174506 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-7f8f44c5c4-qt2tm" Jan 27 07:55:24 crc kubenswrapper[4787]: I0127 07:55:24.183327 4787 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-77c8d84cd6-dnssw" Jan 27 07:55:24 crc kubenswrapper[4787]: I0127 07:55:24.457937 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-7f8f44c5c4-qt2tm"] Jan 27 07:55:24 crc kubenswrapper[4787]: W0127 07:55:24.476698 4787 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podbcf43325_3dcd_41bf_aa3f_8741d7cb20f7.slice/crio-9c919a94fa60b1a23da41c93e22175cf0959eb4014eddc15250287cd46a98e6b WatchSource:0}: Error finding container 9c919a94fa60b1a23da41c93e22175cf0959eb4014eddc15250287cd46a98e6b: Status 404 returned error can't find the container with id 9c919a94fa60b1a23da41c93e22175cf0959eb4014eddc15250287cd46a98e6b Jan 27 07:55:24 crc kubenswrapper[4787]: I0127 07:55:24.512960 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-77c8d84cd6-dnssw"] Jan 27 07:55:24 crc kubenswrapper[4787]: W0127 07:55:24.526712 4787 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod50febbec_2599_4f05_8482_938e97baa480.slice/crio-69eae831a34390bb4f70a1e1495a4d1c1c9d40a31b059ff9f27d0d326162fa1b WatchSource:0}: Error finding container 69eae831a34390bb4f70a1e1495a4d1c1c9d40a31b059ff9f27d0d326162fa1b: Status 404 returned error can't find the container with id 69eae831a34390bb4f70a1e1495a4d1c1c9d40a31b059ff9f27d0d326162fa1b Jan 27 07:55:24 crc kubenswrapper[4787]: I0127 07:55:24.988071 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-7f8f44c5c4-qt2tm" event={"ID":"bcf43325-3dcd-41bf-aa3f-8741d7cb20f7","Type":"ContainerStarted","Data":"5132e561fe24db2363792d271da994726d3188d66693d1e5bf841773ea1429e5"} Jan 27 07:55:24 crc kubenswrapper[4787]: I0127 07:55:24.988680 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-7f8f44c5c4-qt2tm" event={"ID":"bcf43325-3dcd-41bf-aa3f-8741d7cb20f7","Type":"ContainerStarted","Data":"9c919a94fa60b1a23da41c93e22175cf0959eb4014eddc15250287cd46a98e6b"} Jan 27 07:55:24 crc kubenswrapper[4787]: I0127 07:55:24.988722 4787 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-7f8f44c5c4-qt2tm" Jan 27 07:55:24 crc kubenswrapper[4787]: I0127 07:55:24.991236 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-77c8d84cd6-dnssw" event={"ID":"50febbec-2599-4f05-8482-938e97baa480","Type":"ContainerStarted","Data":"f212a3e2eaa7a378e3de8d39bbadf3523fe6313064da1af261e8fed4159ff76e"} Jan 27 07:55:24 crc kubenswrapper[4787]: I0127 07:55:24.991294 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-77c8d84cd6-dnssw" event={"ID":"50febbec-2599-4f05-8482-938e97baa480","Type":"ContainerStarted","Data":"69eae831a34390bb4f70a1e1495a4d1c1c9d40a31b059ff9f27d0d326162fa1b"} Jan 27 07:55:24 crc kubenswrapper[4787]: I0127 07:55:24.994718 4787 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-7f8f44c5c4-qt2tm" Jan 27 07:55:25 crc kubenswrapper[4787]: I0127 07:55:25.019326 4787 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openshift-controller-manager/controller-manager-7f8f44c5c4-qt2tm" podStartSLOduration=3.019304334 podStartE2EDuration="3.019304334s" podCreationTimestamp="2026-01-27 07:55:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-27 07:55:25.018332552 +0000 UTC m=+230.670688064" watchObservedRunningTime="2026-01-27 07:55:25.019304334 +0000 UTC m=+230.671659826" Jan 27 07:55:25 crc kubenswrapper[4787]: I0127 07:55:25.062381 4787 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-77c8d84cd6-dnssw" podStartSLOduration=3.062356264 podStartE2EDuration="3.062356264s" podCreationTimestamp="2026-01-27 07:55:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-27 07:55:25.061028658 +0000 UTC m=+230.713384140" watchObservedRunningTime="2026-01-27 07:55:25.062356264 +0000 UTC m=+230.714711756" Jan 27 07:55:25 crc kubenswrapper[4787]: I0127 07:55:25.095191 4787 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="17fe0585-ad09-4585-9b4c-01698e2c295b" path="/var/lib/kubelet/pods/17fe0585-ad09-4585-9b4c-01698e2c295b/volumes" Jan 27 07:55:25 crc kubenswrapper[4787]: I0127 07:55:25.096105 4787 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="816883bf-afa6-47b8-b823-7603ebf5cc71" path="/var/lib/kubelet/pods/816883bf-afa6-47b8-b823-7603ebf5cc71/volumes" Jan 27 07:55:25 crc kubenswrapper[4787]: I0127 07:55:25.998284 4787 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-77c8d84cd6-dnssw" Jan 27 07:55:26 crc kubenswrapper[4787]: I0127 07:55:26.008898 4787 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-77c8d84cd6-dnssw" Jan 27 07:55:33 crc kubenswrapper[4787]: I0127 07:55:33.292262 4787 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Jan 27 07:55:33 crc kubenswrapper[4787]: I0127 07:55:33.294576 4787 kubelet.go:2431] "SyncLoop REMOVE" source="file" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Jan 27 07:55:33 crc kubenswrapper[4787]: I0127 07:55:33.294754 4787 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 27 07:55:33 crc kubenswrapper[4787]: I0127 07:55:33.295055 4787 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Jan 27 07:55:33 crc kubenswrapper[4787]: I0127 07:55:33.295039 4787 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" containerID="cri-o://35e90c0899d9ae9ea7cce3f5904955f1f3a80e147efbd766e9ed3b34014f40df" gracePeriod=15 Jan 27 07:55:33 crc kubenswrapper[4787]: I0127 07:55:33.295074 4787 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" containerID="cri-o://215bbc991c576293e40d9ba239e697bfad702383af70483ddf0acea311b4ae28" gracePeriod=15 Jan 27 07:55:33 crc kubenswrapper[4787]: I0127 07:55:33.295158 4787 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" containerID="cri-o://53dd18ffb0f39abccd8edc121977448f4c491f6be9acee866dde8cbeb7c52ab5" gracePeriod=15 Jan 27 07:55:33 crc kubenswrapper[4787]: I0127 07:55:33.295248 4787 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" containerID="cri-o://af5452f3980a8a4614721cb60fffffb3b1f3f5d8d82928249857ed2dd3fb5986" gracePeriod=15 Jan 27 07:55:33 crc kubenswrapper[4787]: I0127 07:55:33.295287 4787 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" containerID="cri-o://f3424e1f77a68fac6a8ccd12e018ce28850817c99e72cef7edbff0941124c46b" gracePeriod=15 Jan 27 07:55:33 crc kubenswrapper[4787]: E0127 07:55:33.295662 4787 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" Jan 27 07:55:33 crc kubenswrapper[4787]: I0127 07:55:33.295714 4787 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" Jan 27 07:55:33 crc kubenswrapper[4787]: E0127 07:55:33.295749 4787 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" Jan 27 07:55:33 crc kubenswrapper[4787]: I0127 07:55:33.295767 4787 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" Jan 27 07:55:33 crc kubenswrapper[4787]: E0127 07:55:33.295788 4787 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Jan 27 07:55:33 crc kubenswrapper[4787]: I0127 07:55:33.295860 4787 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Jan 27 07:55:33 crc kubenswrapper[4787]: E0127 07:55:33.295881 4787 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" Jan 27 07:55:33 crc kubenswrapper[4787]: I0127 07:55:33.295896 4787 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" Jan 27 07:55:33 crc kubenswrapper[4787]: E0127 07:55:33.295919 4787 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="setup" Jan 27 07:55:33 crc kubenswrapper[4787]: I0127 07:55:33.295933 4787 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="setup" Jan 27 07:55:33 crc kubenswrapper[4787]: E0127 07:55:33.295961 4787 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Jan 27 07:55:33 crc kubenswrapper[4787]: I0127 07:55:33.295976 4787 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Jan 27 07:55:33 crc kubenswrapper[4787]: E0127 07:55:33.296017 4787 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" Jan 27 07:55:33 crc kubenswrapper[4787]: I0127 07:55:33.296033 4787 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" Jan 27 07:55:33 crc kubenswrapper[4787]: I0127 07:55:33.296282 4787 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Jan 27 07:55:33 crc kubenswrapper[4787]: I0127 07:55:33.296315 4787 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Jan 27 07:55:33 crc kubenswrapper[4787]: I0127 07:55:33.296337 4787 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" Jan 27 07:55:33 crc kubenswrapper[4787]: I0127 07:55:33.296359 4787 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" Jan 27 07:55:33 crc kubenswrapper[4787]: I0127 07:55:33.296379 4787 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Jan 27 07:55:33 crc kubenswrapper[4787]: I0127 07:55:33.296395 4787 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" Jan 27 07:55:33 crc kubenswrapper[4787]: I0127 07:55:33.296424 4787 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" Jan 27 07:55:33 crc kubenswrapper[4787]: E0127 07:55:33.296686 4787 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Jan 27 07:55:33 crc kubenswrapper[4787]: I0127 07:55:33.296712 4787 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Jan 27 07:55:33 crc kubenswrapper[4787]: I0127 07:55:33.299643 4787 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" 
pod="openshift-kube-apiserver/kube-apiserver-crc" oldPodUID="f4b27818a5e8e43d0dc095d08835c792" podUID="71bb4a3aecc4ba5b26c4b7318770ce13" Jan 27 07:55:33 crc kubenswrapper[4787]: I0127 07:55:33.430309 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 27 07:55:33 crc kubenswrapper[4787]: I0127 07:55:33.430811 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 27 07:55:33 crc kubenswrapper[4787]: I0127 07:55:33.430852 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 27 07:55:33 crc kubenswrapper[4787]: I0127 07:55:33.430902 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 27 07:55:33 crc kubenswrapper[4787]: I0127 07:55:33.430936 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 27 07:55:33 crc kubenswrapper[4787]: I0127 07:55:33.430965 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 27 07:55:33 crc kubenswrapper[4787]: I0127 07:55:33.430994 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 27 07:55:33 crc kubenswrapper[4787]: I0127 07:55:33.431028 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 27 07:55:33 crc kubenswrapper[4787]: I0127 07:55:33.532620 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" 
(UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 27 07:55:33 crc kubenswrapper[4787]: I0127 07:55:33.532691 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 27 07:55:33 crc kubenswrapper[4787]: I0127 07:55:33.532764 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 27 07:55:33 crc kubenswrapper[4787]: I0127 07:55:33.532807 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 27 07:55:33 crc kubenswrapper[4787]: I0127 07:55:33.532848 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 27 07:55:33 crc kubenswrapper[4787]: I0127 07:55:33.532884 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 27 07:55:33 crc kubenswrapper[4787]: I0127 07:55:33.532931 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 27 07:55:33 crc kubenswrapper[4787]: I0127 07:55:33.532996 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 27 07:55:33 crc kubenswrapper[4787]: I0127 07:55:33.533048 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 27 07:55:33 crc kubenswrapper[4787]: I0127 07:55:33.533110 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-resource-dir\" (UniqueName: 
\"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 27 07:55:33 crc kubenswrapper[4787]: I0127 07:55:33.533149 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 27 07:55:33 crc kubenswrapper[4787]: I0127 07:55:33.533194 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 27 07:55:33 crc kubenswrapper[4787]: I0127 07:55:33.533230 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 27 07:55:33 crc kubenswrapper[4787]: I0127 07:55:33.533247 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 27 07:55:33 crc kubenswrapper[4787]: I0127 07:55:33.533269 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 27 07:55:33 crc kubenswrapper[4787]: I0127 07:55:33.533287 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 27 07:55:34 crc kubenswrapper[4787]: I0127 07:55:34.086902 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/1.log" Jan 27 07:55:34 crc kubenswrapper[4787]: I0127 07:55:34.089151 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Jan 27 07:55:34 crc kubenswrapper[4787]: I0127 07:55:34.090042 4787 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="215bbc991c576293e40d9ba239e697bfad702383af70483ddf0acea311b4ae28" exitCode=0 Jan 27 07:55:34 crc kubenswrapper[4787]: I0127 07:55:34.090084 4787 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="f3424e1f77a68fac6a8ccd12e018ce28850817c99e72cef7edbff0941124c46b" exitCode=0 Jan 27 07:55:34 crc kubenswrapper[4787]: I0127 07:55:34.090098 4787 generic.go:334] 
"Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="53dd18ffb0f39abccd8edc121977448f4c491f6be9acee866dde8cbeb7c52ab5" exitCode=0 Jan 27 07:55:34 crc kubenswrapper[4787]: I0127 07:55:34.090112 4787 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="af5452f3980a8a4614721cb60fffffb3b1f3f5d8d82928249857ed2dd3fb5986" exitCode=2 Jan 27 07:55:34 crc kubenswrapper[4787]: I0127 07:55:34.090226 4787 scope.go:117] "RemoveContainer" containerID="879d855210b326ad3cbb964024c0b48b7373519deae8b974b9f5864416c15ef3" Jan 27 07:55:34 crc kubenswrapper[4787]: I0127 07:55:34.093291 4787 generic.go:334] "Generic (PLEG): container finished" podID="d78a135f-a7de-4bb7-b2aa-168fb353658f" containerID="86b21223e074e0eb982b5516ae2faf73f9fccba44d21367b5f9b9319a1d62acc" exitCode=0 Jan 27 07:55:34 crc kubenswrapper[4787]: I0127 07:55:34.093367 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"d78a135f-a7de-4bb7-b2aa-168fb353658f","Type":"ContainerDied","Data":"86b21223e074e0eb982b5516ae2faf73f9fccba44d21367b5f9b9319a1d62acc"} Jan 27 07:55:34 crc kubenswrapper[4787]: I0127 07:55:34.095429 4787 status_manager.go:851] "Failed to get status for pod" podUID="d78a135f-a7de-4bb7-b2aa-168fb353658f" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.181:6443: connect: connection refused" Jan 27 07:55:34 crc kubenswrapper[4787]: E0127 07:55:34.176185 4787 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.181:6443: connect: connection refused" Jan 27 07:55:34 crc kubenswrapper[4787]: E0127 07:55:34.176772 4787 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.181:6443: connect: connection refused" Jan 27 07:55:34 crc kubenswrapper[4787]: E0127 07:55:34.177087 4787 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.181:6443: connect: connection refused" Jan 27 07:55:34 crc kubenswrapper[4787]: E0127 07:55:34.177258 4787 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.181:6443: connect: connection refused" Jan 27 07:55:34 crc kubenswrapper[4787]: E0127 07:55:34.177424 4787 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.181:6443: connect: connection refused" Jan 27 07:55:34 crc kubenswrapper[4787]: I0127 07:55:34.177449 4787 controller.go:115] "failed to update lease using latest lease, fallback to ensure lease" err="failed 5 attempts to update lease" Jan 27 07:55:34 crc kubenswrapper[4787]: E0127 07:55:34.177626 4787 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.181:6443: connect: connection refused" 
interval="200ms" Jan 27 07:55:34 crc kubenswrapper[4787]: E0127 07:55:34.379295 4787 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.181:6443: connect: connection refused" interval="400ms" Jan 27 07:55:34 crc kubenswrapper[4787]: E0127 07:55:34.780913 4787 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.181:6443: connect: connection refused" interval="800ms" Jan 27 07:55:35 crc kubenswrapper[4787]: I0127 07:55:35.087657 4787 status_manager.go:851] "Failed to get status for pod" podUID="d78a135f-a7de-4bb7-b2aa-168fb353658f" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.181:6443: connect: connection refused" Jan 27 07:55:35 crc kubenswrapper[4787]: I0127 07:55:35.104076 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Jan 27 07:55:35 crc kubenswrapper[4787]: I0127 07:55:35.510790 4787 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Jan 27 07:55:35 crc kubenswrapper[4787]: I0127 07:55:35.511873 4787 status_manager.go:851] "Failed to get status for pod" podUID="d78a135f-a7de-4bb7-b2aa-168fb353658f" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.181:6443: connect: connection refused" Jan 27 07:55:35 crc kubenswrapper[4787]: I0127 07:55:35.575776 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/d78a135f-a7de-4bb7-b2aa-168fb353658f-var-lock\") pod \"d78a135f-a7de-4bb7-b2aa-168fb353658f\" (UID: \"d78a135f-a7de-4bb7-b2aa-168fb353658f\") " Jan 27 07:55:35 crc kubenswrapper[4787]: I0127 07:55:35.576163 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/d78a135f-a7de-4bb7-b2aa-168fb353658f-kubelet-dir\") pod \"d78a135f-a7de-4bb7-b2aa-168fb353658f\" (UID: \"d78a135f-a7de-4bb7-b2aa-168fb353658f\") " Jan 27 07:55:35 crc kubenswrapper[4787]: I0127 07:55:35.575906 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/d78a135f-a7de-4bb7-b2aa-168fb353658f-var-lock" (OuterVolumeSpecName: "var-lock") pod "d78a135f-a7de-4bb7-b2aa-168fb353658f" (UID: "d78a135f-a7de-4bb7-b2aa-168fb353658f"). InnerVolumeSpecName "var-lock". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 27 07:55:35 crc kubenswrapper[4787]: I0127 07:55:35.576245 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/d78a135f-a7de-4bb7-b2aa-168fb353658f-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "d78a135f-a7de-4bb7-b2aa-168fb353658f" (UID: "d78a135f-a7de-4bb7-b2aa-168fb353658f"). InnerVolumeSpecName "kubelet-dir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 27 07:55:35 crc kubenswrapper[4787]: I0127 07:55:35.576343 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/d78a135f-a7de-4bb7-b2aa-168fb353658f-kube-api-access\") pod \"d78a135f-a7de-4bb7-b2aa-168fb353658f\" (UID: \"d78a135f-a7de-4bb7-b2aa-168fb353658f\") " Jan 27 07:55:35 crc kubenswrapper[4787]: I0127 07:55:35.576992 4787 reconciler_common.go:293] "Volume detached for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/d78a135f-a7de-4bb7-b2aa-168fb353658f-var-lock\") on node \"crc\" DevicePath \"\"" Jan 27 07:55:35 crc kubenswrapper[4787]: I0127 07:55:35.577015 4787 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/d78a135f-a7de-4bb7-b2aa-168fb353658f-kubelet-dir\") on node \"crc\" DevicePath \"\"" Jan 27 07:55:35 crc kubenswrapper[4787]: E0127 07:55:35.582310 4787 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.181:6443: connect: connection refused" interval="1.6s" Jan 27 07:55:35 crc kubenswrapper[4787]: I0127 07:55:35.589445 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d78a135f-a7de-4bb7-b2aa-168fb353658f-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "d78a135f-a7de-4bb7-b2aa-168fb353658f" (UID: "d78a135f-a7de-4bb7-b2aa-168fb353658f"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 27 07:55:35 crc kubenswrapper[4787]: I0127 07:55:35.679361 4787 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/d78a135f-a7de-4bb7-b2aa-168fb353658f-kube-api-access\") on node \"crc\" DevicePath \"\"" Jan 27 07:55:35 crc kubenswrapper[4787]: I0127 07:55:35.681109 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Jan 27 07:55:35 crc kubenswrapper[4787]: I0127 07:55:35.682405 4787 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 27 07:55:35 crc kubenswrapper[4787]: I0127 07:55:35.682931 4787 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.181:6443: connect: connection refused" Jan 27 07:55:35 crc kubenswrapper[4787]: I0127 07:55:35.683103 4787 status_manager.go:851] "Failed to get status for pod" podUID="d78a135f-a7de-4bb7-b2aa-168fb353658f" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.181:6443: connect: connection refused" Jan 27 07:55:35 crc kubenswrapper[4787]: I0127 07:55:35.780230 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " Jan 27 07:55:35 crc kubenswrapper[4787]: I0127 07:55:35.780355 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " Jan 27 07:55:35 crc kubenswrapper[4787]: I0127 07:55:35.780408 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " Jan 27 07:55:35 crc kubenswrapper[4787]: I0127 07:55:35.780821 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir" (OuterVolumeSpecName: "cert-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "cert-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 27 07:55:35 crc kubenswrapper[4787]: I0127 07:55:35.780880 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir" (OuterVolumeSpecName: "resource-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "resource-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 27 07:55:35 crc kubenswrapper[4787]: I0127 07:55:35.780911 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir" (OuterVolumeSpecName: "audit-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "audit-dir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 27 07:55:35 crc kubenswrapper[4787]: I0127 07:55:35.882834 4787 reconciler_common.go:293] "Volume detached for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") on node \"crc\" DevicePath \"\"" Jan 27 07:55:35 crc kubenswrapper[4787]: I0127 07:55:35.882884 4787 reconciler_common.go:293] "Volume detached for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") on node \"crc\" DevicePath \"\"" Jan 27 07:55:35 crc kubenswrapper[4787]: I0127 07:55:35.882896 4787 reconciler_common.go:293] "Volume detached for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") on node \"crc\" DevicePath \"\"" Jan 27 07:55:36 crc kubenswrapper[4787]: I0127 07:55:36.114902 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"d78a135f-a7de-4bb7-b2aa-168fb353658f","Type":"ContainerDied","Data":"c5a010ebcd473be981facf7a933739c86185647f5b265aa8a4c5d4030136ffd4"} Jan 27 07:55:36 crc kubenswrapper[4787]: I0127 07:55:36.114946 4787 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Jan 27 07:55:36 crc kubenswrapper[4787]: I0127 07:55:36.114958 4787 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c5a010ebcd473be981facf7a933739c86185647f5b265aa8a4c5d4030136ffd4" Jan 27 07:55:36 crc kubenswrapper[4787]: I0127 07:55:36.119227 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Jan 27 07:55:36 crc kubenswrapper[4787]: I0127 07:55:36.120181 4787 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="35e90c0899d9ae9ea7cce3f5904955f1f3a80e147efbd766e9ed3b34014f40df" exitCode=0 Jan 27 07:55:36 crc kubenswrapper[4787]: I0127 07:55:36.120255 4787 scope.go:117] "RemoveContainer" containerID="215bbc991c576293e40d9ba239e697bfad702383af70483ddf0acea311b4ae28" Jan 27 07:55:36 crc kubenswrapper[4787]: I0127 07:55:36.120306 4787 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 27 07:55:36 crc kubenswrapper[4787]: I0127 07:55:36.133185 4787 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.181:6443: connect: connection refused" Jan 27 07:55:36 crc kubenswrapper[4787]: I0127 07:55:36.133757 4787 status_manager.go:851] "Failed to get status for pod" podUID="d78a135f-a7de-4bb7-b2aa-168fb353658f" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.181:6443: connect: connection refused" Jan 27 07:55:36 crc kubenswrapper[4787]: I0127 07:55:36.142387 4787 scope.go:117] "RemoveContainer" containerID="f3424e1f77a68fac6a8ccd12e018ce28850817c99e72cef7edbff0941124c46b" Jan 27 07:55:36 crc kubenswrapper[4787]: I0127 07:55:36.142386 4787 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.181:6443: connect: connection refused" Jan 27 07:55:36 crc kubenswrapper[4787]: I0127 07:55:36.142958 4787 status_manager.go:851] "Failed to get status for pod" podUID="d78a135f-a7de-4bb7-b2aa-168fb353658f" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.181:6443: connect: connection refused" Jan 27 07:55:36 crc kubenswrapper[4787]: I0127 07:55:36.158705 4787 scope.go:117] "RemoveContainer" containerID="53dd18ffb0f39abccd8edc121977448f4c491f6be9acee866dde8cbeb7c52ab5" Jan 27 07:55:36 crc kubenswrapper[4787]: I0127 07:55:36.177167 4787 scope.go:117] "RemoveContainer" containerID="af5452f3980a8a4614721cb60fffffb3b1f3f5d8d82928249857ed2dd3fb5986" Jan 27 07:55:36 crc kubenswrapper[4787]: I0127 07:55:36.193311 4787 scope.go:117] "RemoveContainer" containerID="35e90c0899d9ae9ea7cce3f5904955f1f3a80e147efbd766e9ed3b34014f40df" Jan 27 07:55:36 crc kubenswrapper[4787]: I0127 07:55:36.216303 4787 scope.go:117] "RemoveContainer" containerID="c37afd8d2562157729aca3685d7abae2bb6b9e081c2b456706dde875fd762c07" Jan 27 07:55:36 crc kubenswrapper[4787]: I0127 07:55:36.236944 4787 scope.go:117] "RemoveContainer" containerID="215bbc991c576293e40d9ba239e697bfad702383af70483ddf0acea311b4ae28" Jan 27 07:55:36 crc kubenswrapper[4787]: E0127 07:55:36.237395 4787 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"215bbc991c576293e40d9ba239e697bfad702383af70483ddf0acea311b4ae28\": container with ID starting with 215bbc991c576293e40d9ba239e697bfad702383af70483ddf0acea311b4ae28 not found: ID does not exist" containerID="215bbc991c576293e40d9ba239e697bfad702383af70483ddf0acea311b4ae28" Jan 27 07:55:36 crc kubenswrapper[4787]: I0127 07:55:36.237435 4787 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"215bbc991c576293e40d9ba239e697bfad702383af70483ddf0acea311b4ae28"} err="failed to get container status \"215bbc991c576293e40d9ba239e697bfad702383af70483ddf0acea311b4ae28\": rpc error: code = NotFound desc = could not find container 
\"215bbc991c576293e40d9ba239e697bfad702383af70483ddf0acea311b4ae28\": container with ID starting with 215bbc991c576293e40d9ba239e697bfad702383af70483ddf0acea311b4ae28 not found: ID does not exist" Jan 27 07:55:36 crc kubenswrapper[4787]: I0127 07:55:36.237463 4787 scope.go:117] "RemoveContainer" containerID="f3424e1f77a68fac6a8ccd12e018ce28850817c99e72cef7edbff0941124c46b" Jan 27 07:55:36 crc kubenswrapper[4787]: E0127 07:55:36.237960 4787 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f3424e1f77a68fac6a8ccd12e018ce28850817c99e72cef7edbff0941124c46b\": container with ID starting with f3424e1f77a68fac6a8ccd12e018ce28850817c99e72cef7edbff0941124c46b not found: ID does not exist" containerID="f3424e1f77a68fac6a8ccd12e018ce28850817c99e72cef7edbff0941124c46b" Jan 27 07:55:36 crc kubenswrapper[4787]: I0127 07:55:36.237998 4787 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f3424e1f77a68fac6a8ccd12e018ce28850817c99e72cef7edbff0941124c46b"} err="failed to get container status \"f3424e1f77a68fac6a8ccd12e018ce28850817c99e72cef7edbff0941124c46b\": rpc error: code = NotFound desc = could not find container \"f3424e1f77a68fac6a8ccd12e018ce28850817c99e72cef7edbff0941124c46b\": container with ID starting with f3424e1f77a68fac6a8ccd12e018ce28850817c99e72cef7edbff0941124c46b not found: ID does not exist" Jan 27 07:55:36 crc kubenswrapper[4787]: I0127 07:55:36.238015 4787 scope.go:117] "RemoveContainer" containerID="53dd18ffb0f39abccd8edc121977448f4c491f6be9acee866dde8cbeb7c52ab5" Jan 27 07:55:36 crc kubenswrapper[4787]: E0127 07:55:36.238292 4787 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"53dd18ffb0f39abccd8edc121977448f4c491f6be9acee866dde8cbeb7c52ab5\": container with ID starting with 53dd18ffb0f39abccd8edc121977448f4c491f6be9acee866dde8cbeb7c52ab5 not found: ID does not exist" containerID="53dd18ffb0f39abccd8edc121977448f4c491f6be9acee866dde8cbeb7c52ab5" Jan 27 07:55:36 crc kubenswrapper[4787]: I0127 07:55:36.238322 4787 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"53dd18ffb0f39abccd8edc121977448f4c491f6be9acee866dde8cbeb7c52ab5"} err="failed to get container status \"53dd18ffb0f39abccd8edc121977448f4c491f6be9acee866dde8cbeb7c52ab5\": rpc error: code = NotFound desc = could not find container \"53dd18ffb0f39abccd8edc121977448f4c491f6be9acee866dde8cbeb7c52ab5\": container with ID starting with 53dd18ffb0f39abccd8edc121977448f4c491f6be9acee866dde8cbeb7c52ab5 not found: ID does not exist" Jan 27 07:55:36 crc kubenswrapper[4787]: I0127 07:55:36.238337 4787 scope.go:117] "RemoveContainer" containerID="af5452f3980a8a4614721cb60fffffb3b1f3f5d8d82928249857ed2dd3fb5986" Jan 27 07:55:36 crc kubenswrapper[4787]: E0127 07:55:36.239265 4787 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"af5452f3980a8a4614721cb60fffffb3b1f3f5d8d82928249857ed2dd3fb5986\": container with ID starting with af5452f3980a8a4614721cb60fffffb3b1f3f5d8d82928249857ed2dd3fb5986 not found: ID does not exist" containerID="af5452f3980a8a4614721cb60fffffb3b1f3f5d8d82928249857ed2dd3fb5986" Jan 27 07:55:36 crc kubenswrapper[4787]: I0127 07:55:36.239292 4787 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"af5452f3980a8a4614721cb60fffffb3b1f3f5d8d82928249857ed2dd3fb5986"} 
err="failed to get container status \"af5452f3980a8a4614721cb60fffffb3b1f3f5d8d82928249857ed2dd3fb5986\": rpc error: code = NotFound desc = could not find container \"af5452f3980a8a4614721cb60fffffb3b1f3f5d8d82928249857ed2dd3fb5986\": container with ID starting with af5452f3980a8a4614721cb60fffffb3b1f3f5d8d82928249857ed2dd3fb5986 not found: ID does not exist" Jan 27 07:55:36 crc kubenswrapper[4787]: I0127 07:55:36.239320 4787 scope.go:117] "RemoveContainer" containerID="35e90c0899d9ae9ea7cce3f5904955f1f3a80e147efbd766e9ed3b34014f40df" Jan 27 07:55:36 crc kubenswrapper[4787]: E0127 07:55:36.239663 4787 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"35e90c0899d9ae9ea7cce3f5904955f1f3a80e147efbd766e9ed3b34014f40df\": container with ID starting with 35e90c0899d9ae9ea7cce3f5904955f1f3a80e147efbd766e9ed3b34014f40df not found: ID does not exist" containerID="35e90c0899d9ae9ea7cce3f5904955f1f3a80e147efbd766e9ed3b34014f40df" Jan 27 07:55:36 crc kubenswrapper[4787]: I0127 07:55:36.239686 4787 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"35e90c0899d9ae9ea7cce3f5904955f1f3a80e147efbd766e9ed3b34014f40df"} err="failed to get container status \"35e90c0899d9ae9ea7cce3f5904955f1f3a80e147efbd766e9ed3b34014f40df\": rpc error: code = NotFound desc = could not find container \"35e90c0899d9ae9ea7cce3f5904955f1f3a80e147efbd766e9ed3b34014f40df\": container with ID starting with 35e90c0899d9ae9ea7cce3f5904955f1f3a80e147efbd766e9ed3b34014f40df not found: ID does not exist" Jan 27 07:55:36 crc kubenswrapper[4787]: I0127 07:55:36.239703 4787 scope.go:117] "RemoveContainer" containerID="c37afd8d2562157729aca3685d7abae2bb6b9e081c2b456706dde875fd762c07" Jan 27 07:55:36 crc kubenswrapper[4787]: E0127 07:55:36.239993 4787 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c37afd8d2562157729aca3685d7abae2bb6b9e081c2b456706dde875fd762c07\": container with ID starting with c37afd8d2562157729aca3685d7abae2bb6b9e081c2b456706dde875fd762c07 not found: ID does not exist" containerID="c37afd8d2562157729aca3685d7abae2bb6b9e081c2b456706dde875fd762c07" Jan 27 07:55:36 crc kubenswrapper[4787]: I0127 07:55:36.240016 4787 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c37afd8d2562157729aca3685d7abae2bb6b9e081c2b456706dde875fd762c07"} err="failed to get container status \"c37afd8d2562157729aca3685d7abae2bb6b9e081c2b456706dde875fd762c07\": rpc error: code = NotFound desc = could not find container \"c37afd8d2562157729aca3685d7abae2bb6b9e081c2b456706dde875fd762c07\": container with ID starting with c37afd8d2562157729aca3685d7abae2bb6b9e081c2b456706dde875fd762c07 not found: ID does not exist" Jan 27 07:55:37 crc kubenswrapper[4787]: I0127 07:55:37.086052 4787 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f4b27818a5e8e43d0dc095d08835c792" path="/var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/volumes" Jan 27 07:55:37 crc kubenswrapper[4787]: E0127 07:55:37.183773 4787 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.181:6443: connect: connection refused" interval="3.2s" Jan 27 07:55:38 crc kubenswrapper[4787]: E0127 07:55:38.339862 4787 kubelet.go:1929] "Failed creating a mirror pod for" err="Post 
\"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods\": dial tcp 38.102.83.181:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 27 07:55:38 crc kubenswrapper[4787]: I0127 07:55:38.340659 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 27 07:55:38 crc kubenswrapper[4787]: W0127 07:55:38.376276 4787 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf85e55b1a89d02b0cb034b1ea31ed45a.slice/crio-21f446a50fff424bd33d3e9272ceb55cbd687e3d8ff6f7819f58b150d5a98b7d WatchSource:0}: Error finding container 21f446a50fff424bd33d3e9272ceb55cbd687e3d8ff6f7819f58b150d5a98b7d: Status 404 returned error can't find the container with id 21f446a50fff424bd33d3e9272ceb55cbd687e3d8ff6f7819f58b150d5a98b7d Jan 27 07:55:38 crc kubenswrapper[4787]: E0127 07:55:38.380653 4787 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/events\": dial tcp 38.102.83.181:6443: connect: connection refused" event="&Event{ObjectMeta:{kube-apiserver-startup-monitor-crc.188e875d4bfccc34 openshift-kube-apiserver 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-apiserver,Name:kube-apiserver-startup-monitor-crc,UID:f85e55b1a89d02b0cb034b1ea31ed45a,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{startup-monitor},},Reason:Pulled,Message:Container image \"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\" already present on machine,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-01-27 07:55:38.379750452 +0000 UTC m=+244.032105944,LastTimestamp:2026-01-27 07:55:38.379750452 +0000 UTC m=+244.032105944,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Jan 27 07:55:39 crc kubenswrapper[4787]: I0127 07:55:39.141260 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" event={"ID":"f85e55b1a89d02b0cb034b1ea31ed45a","Type":"ContainerStarted","Data":"f4458c08183867e68d981a00f792886a32dfbdc05716b6e94fea396b9dc48aef"} Jan 27 07:55:39 crc kubenswrapper[4787]: I0127 07:55:39.141910 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" event={"ID":"f85e55b1a89d02b0cb034b1ea31ed45a","Type":"ContainerStarted","Data":"21f446a50fff424bd33d3e9272ceb55cbd687e3d8ff6f7819f58b150d5a98b7d"} Jan 27 07:55:39 crc kubenswrapper[4787]: I0127 07:55:39.142749 4787 status_manager.go:851] "Failed to get status for pod" podUID="d78a135f-a7de-4bb7-b2aa-168fb353658f" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.181:6443: connect: connection refused" Jan 27 07:55:39 crc kubenswrapper[4787]: E0127 07:55:39.142759 4787 kubelet.go:1929] "Failed creating a mirror pod for" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods\": dial tcp 38.102.83.181:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 
27 07:55:40 crc kubenswrapper[4787]: E0127 07:55:40.386182 4787 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.181:6443: connect: connection refused" interval="6.4s" Jan 27 07:55:44 crc kubenswrapper[4787]: E0127 07:55:44.276388 4787 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/events\": dial tcp 38.102.83.181:6443: connect: connection refused" event="&Event{ObjectMeta:{kube-apiserver-startup-monitor-crc.188e875d4bfccc34 openshift-kube-apiserver 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-apiserver,Name:kube-apiserver-startup-monitor-crc,UID:f85e55b1a89d02b0cb034b1ea31ed45a,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{startup-monitor},},Reason:Pulled,Message:Container image \"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\" already present on machine,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-01-27 07:55:38.379750452 +0000 UTC m=+244.032105944,LastTimestamp:2026-01-27 07:55:38.379750452 +0000 UTC m=+244.032105944,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Jan 27 07:55:45 crc kubenswrapper[4787]: I0127 07:55:45.082050 4787 status_manager.go:851] "Failed to get status for pod" podUID="d78a135f-a7de-4bb7-b2aa-168fb353658f" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.181:6443: connect: connection refused" Jan 27 07:55:46 crc kubenswrapper[4787]: E0127 07:55:46.788539 4787 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.181:6443: connect: connection refused" interval="7s" Jan 27 07:55:47 crc kubenswrapper[4787]: I0127 07:55:47.215798 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/0.log" Jan 27 07:55:47 crc kubenswrapper[4787]: I0127 07:55:47.216313 4787 generic.go:334] "Generic (PLEG): container finished" podID="f614b9022728cf315e60c057852e563e" containerID="5c114c2274358e777b240765cca81ea54a2e334002317a86595b7bfec95a11fc" exitCode=1 Jan 27 07:55:47 crc kubenswrapper[4787]: I0127 07:55:47.216366 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerDied","Data":"5c114c2274358e777b240765cca81ea54a2e334002317a86595b7bfec95a11fc"} Jan 27 07:55:47 crc kubenswrapper[4787]: I0127 07:55:47.217219 4787 scope.go:117] "RemoveContainer" containerID="5c114c2274358e777b240765cca81ea54a2e334002317a86595b7bfec95a11fc" Jan 27 07:55:47 crc kubenswrapper[4787]: I0127 07:55:47.217754 4787 status_manager.go:851] "Failed to get status for pod" podUID="f614b9022728cf315e60c057852e563e" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="Get 
\"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-controller-manager/pods/kube-controller-manager-crc\": dial tcp 38.102.83.181:6443: connect: connection refused" Jan 27 07:55:47 crc kubenswrapper[4787]: I0127 07:55:47.218230 4787 status_manager.go:851] "Failed to get status for pod" podUID="d78a135f-a7de-4bb7-b2aa-168fb353658f" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.181:6443: connect: connection refused" Jan 27 07:55:47 crc kubenswrapper[4787]: I0127 07:55:47.666318 4787 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 27 07:55:48 crc kubenswrapper[4787]: I0127 07:55:48.076583 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 27 07:55:48 crc kubenswrapper[4787]: I0127 07:55:48.079133 4787 status_manager.go:851] "Failed to get status for pod" podUID="d78a135f-a7de-4bb7-b2aa-168fb353658f" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.181:6443: connect: connection refused" Jan 27 07:55:48 crc kubenswrapper[4787]: I0127 07:55:48.079956 4787 status_manager.go:851] "Failed to get status for pod" podUID="f614b9022728cf315e60c057852e563e" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-controller-manager/pods/kube-controller-manager-crc\": dial tcp 38.102.83.181:6443: connect: connection refused" Jan 27 07:55:48 crc kubenswrapper[4787]: I0127 07:55:48.097333 4787 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="60eef58e-b2eb-43d9-a499-317083a89ca3" Jan 27 07:55:48 crc kubenswrapper[4787]: I0127 07:55:48.097382 4787 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="60eef58e-b2eb-43d9-a499-317083a89ca3" Jan 27 07:55:48 crc kubenswrapper[4787]: E0127 07:55:48.098406 4787 mirror_client.go:138] "Failed deleting a mirror pod" err="Delete \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.181:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 27 07:55:48 crc kubenswrapper[4787]: I0127 07:55:48.099506 4787 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 27 07:55:48 crc kubenswrapper[4787]: W0127 07:55:48.136480 4787 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod71bb4a3aecc4ba5b26c4b7318770ce13.slice/crio-a3692298ea7f352b5f9fdfe2759bf58139ec526ee8697bef05964ee2aa853776 WatchSource:0}: Error finding container a3692298ea7f352b5f9fdfe2759bf58139ec526ee8697bef05964ee2aa853776: Status 404 returned error can't find the container with id a3692298ea7f352b5f9fdfe2759bf58139ec526ee8697bef05964ee2aa853776 Jan 27 07:55:48 crc kubenswrapper[4787]: I0127 07:55:48.224461 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"a3692298ea7f352b5f9fdfe2759bf58139ec526ee8697bef05964ee2aa853776"} Jan 27 07:55:48 crc kubenswrapper[4787]: I0127 07:55:48.228382 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/0.log" Jan 27 07:55:48 crc kubenswrapper[4787]: I0127 07:55:48.228433 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"63a951246b7281cd3c5f01756adf84ab9703190160c4a4c451673d01fa32296b"} Jan 27 07:55:48 crc kubenswrapper[4787]: I0127 07:55:48.230123 4787 status_manager.go:851] "Failed to get status for pod" podUID="d78a135f-a7de-4bb7-b2aa-168fb353658f" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.181:6443: connect: connection refused" Jan 27 07:55:48 crc kubenswrapper[4787]: I0127 07:55:48.230982 4787 status_manager.go:851] "Failed to get status for pod" podUID="f614b9022728cf315e60c057852e563e" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-controller-manager/pods/kube-controller-manager-crc\": dial tcp 38.102.83.181:6443: connect: connection refused" Jan 27 07:55:49 crc kubenswrapper[4787]: I0127 07:55:49.237097 4787 generic.go:334] "Generic (PLEG): container finished" podID="71bb4a3aecc4ba5b26c4b7318770ce13" containerID="bf9ec3f1afa172cf32d1eed8b1918f285a270d70f45636c5b4822130ff205de0" exitCode=0 Jan 27 07:55:49 crc kubenswrapper[4787]: I0127 07:55:49.238444 4787 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="60eef58e-b2eb-43d9-a499-317083a89ca3" Jan 27 07:55:49 crc kubenswrapper[4787]: I0127 07:55:49.238467 4787 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="60eef58e-b2eb-43d9-a499-317083a89ca3" Jan 27 07:55:49 crc kubenswrapper[4787]: I0127 07:55:49.239005 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerDied","Data":"bf9ec3f1afa172cf32d1eed8b1918f285a270d70f45636c5b4822130ff205de0"} Jan 27 07:55:49 crc kubenswrapper[4787]: I0127 07:55:49.239470 4787 status_manager.go:851] "Failed to get status for pod" podUID="d78a135f-a7de-4bb7-b2aa-168fb353658f" pod="openshift-kube-apiserver/installer-9-crc" err="Get 
\"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.181:6443: connect: connection refused" Jan 27 07:55:49 crc kubenswrapper[4787]: E0127 07:55:49.239609 4787 mirror_client.go:138] "Failed deleting a mirror pod" err="Delete \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.181:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 27 07:55:49 crc kubenswrapper[4787]: I0127 07:55:49.239712 4787 status_manager.go:851] "Failed to get status for pod" podUID="f614b9022728cf315e60c057852e563e" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-controller-manager/pods/kube-controller-manager-crc\": dial tcp 38.102.83.181:6443: connect: connection refused" Jan 27 07:55:50 crc kubenswrapper[4787]: I0127 07:55:50.247916 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"befb8460e9bf525a501a5835a5d4107a1c31c4466fc9376b13ef57093e2955d2"} Jan 27 07:55:50 crc kubenswrapper[4787]: I0127 07:55:50.248412 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"bb430e1a05cc1877596767b21be0c1e1b9816a42810838142ef45b31552ad0f1"} Jan 27 07:55:50 crc kubenswrapper[4787]: I0127 07:55:50.248432 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"1b49e20d24c460f7c54c5bc969385d0a3e09ae12ac5c86ff2845bf0b9b490713"} Jan 27 07:55:51 crc kubenswrapper[4787]: I0127 07:55:51.257231 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"baf0aad206f83118f5062666b5f30c32aa1bfc9fd4d7af7b5a41ac7173c54d59"} Jan 27 07:55:51 crc kubenswrapper[4787]: I0127 07:55:51.258456 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"3400869e2e418edbb069a1afa9ed9ba38155a0c4bc80d704190d932b95fa1a0f"} Jan 27 07:55:51 crc kubenswrapper[4787]: I0127 07:55:51.258571 4787 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 27 07:55:51 crc kubenswrapper[4787]: I0127 07:55:51.257726 4787 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="60eef58e-b2eb-43d9-a499-317083a89ca3" Jan 27 07:55:51 crc kubenswrapper[4787]: I0127 07:55:51.258725 4787 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="60eef58e-b2eb-43d9-a499-317083a89ca3" Jan 27 07:55:53 crc kubenswrapper[4787]: I0127 07:55:53.099652 4787 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 27 07:55:53 crc kubenswrapper[4787]: I0127 07:55:53.099866 4787 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 27 07:55:53 crc kubenswrapper[4787]: I0127 07:55:53.107034 4787 
kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 27 07:55:54 crc kubenswrapper[4787]: I0127 07:55:54.957375 4787 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 27 07:55:56 crc kubenswrapper[4787]: I0127 07:55:56.267402 4787 kubelet.go:1914] "Deleted mirror pod because it is outdated" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 27 07:55:56 crc kubenswrapper[4787]: I0127 07:55:56.343179 4787 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openshift-kube-apiserver/kube-apiserver-crc" oldPodUID="71bb4a3aecc4ba5b26c4b7318770ce13" podUID="32c63c91-e6ee-4a8f-bddd-a71cf503fa60" Jan 27 07:55:57 crc kubenswrapper[4787]: I0127 07:55:57.291069 4787 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="60eef58e-b2eb-43d9-a499-317083a89ca3" Jan 27 07:55:57 crc kubenswrapper[4787]: I0127 07:55:57.291128 4787 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="60eef58e-b2eb-43d9-a499-317083a89ca3" Jan 27 07:55:57 crc kubenswrapper[4787]: I0127 07:55:57.300936 4787 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openshift-kube-apiserver/kube-apiserver-crc" oldPodUID="71bb4a3aecc4ba5b26c4b7318770ce13" podUID="32c63c91-e6ee-4a8f-bddd-a71cf503fa60" Jan 27 07:55:57 crc kubenswrapper[4787]: I0127 07:55:57.301779 4787 status_manager.go:308] "Container readiness changed before pod has synced" pod="openshift-kube-apiserver/kube-apiserver-crc" containerID="cri-o://1b49e20d24c460f7c54c5bc969385d0a3e09ae12ac5c86ff2845bf0b9b490713" Jan 27 07:55:57 crc kubenswrapper[4787]: I0127 07:55:57.301810 4787 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 27 07:55:57 crc kubenswrapper[4787]: I0127 07:55:57.666149 4787 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 27 07:55:57 crc kubenswrapper[4787]: I0127 07:55:57.671151 4787 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 27 07:55:58 crc kubenswrapper[4787]: I0127 07:55:58.297701 4787 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="60eef58e-b2eb-43d9-a499-317083a89ca3" Jan 27 07:55:58 crc kubenswrapper[4787]: I0127 07:55:58.297751 4787 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="60eef58e-b2eb-43d9-a499-317083a89ca3" Jan 27 07:55:58 crc kubenswrapper[4787]: I0127 07:55:58.302259 4787 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openshift-kube-apiserver/kube-apiserver-crc" oldPodUID="71bb4a3aecc4ba5b26c4b7318770ce13" podUID="32c63c91-e6ee-4a8f-bddd-a71cf503fa60" Jan 27 07:55:58 crc kubenswrapper[4787]: I0127 07:55:58.307611 4787 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 27 07:56:05 crc kubenswrapper[4787]: I0127 07:56:05.749543 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-service-ca-bundle" Jan 27 07:56:06 crc kubenswrapper[4787]: I0127 07:56:06.474838 4787 
reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"openshift-service-ca.crt" Jan 27 07:56:06 crc kubenswrapper[4787]: I0127 07:56:06.601713 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"oauth-serving-cert" Jan 27 07:56:06 crc kubenswrapper[4787]: I0127 07:56:06.817174 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-service-ca.crt" Jan 27 07:56:06 crc kubenswrapper[4787]: I0127 07:56:06.948389 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"openshift-service-ca.crt" Jan 27 07:56:07 crc kubenswrapper[4787]: I0127 07:56:07.394151 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"ingress-operator-dockercfg-7lnqk" Jan 27 07:56:07 crc kubenswrapper[4787]: I0127 07:56:07.397225 4787 reflector.go:368] Caches populated for *v1.Pod from pkg/kubelet/config/apiserver.go:66 Jan 27 07:56:07 crc kubenswrapper[4787]: I0127 07:56:07.406978 4787 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Jan 27 07:56:07 crc kubenswrapper[4787]: I0127 07:56:07.407087 4787 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Jan 27 07:56:07 crc kubenswrapper[4787]: I0127 07:56:07.415234 4787 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 27 07:56:07 crc kubenswrapper[4787]: I0127 07:56:07.422897 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"kube-root-ca.crt" Jan 27 07:56:07 crc kubenswrapper[4787]: I0127 07:56:07.445475 4787 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-crc" podStartSLOduration=11.445450285 podStartE2EDuration="11.445450285s" podCreationTimestamp="2026-01-27 07:55:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-27 07:56:07.442250432 +0000 UTC m=+273.094605954" watchObservedRunningTime="2026-01-27 07:56:07.445450285 +0000 UTC m=+273.097805837" Jan 27 07:56:07 crc kubenswrapper[4787]: I0127 07:56:07.601138 4787 kubelet.go:2431] "SyncLoop REMOVE" source="file" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Jan 27 07:56:07 crc kubenswrapper[4787]: I0127 07:56:07.601905 4787 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" containerID="cri-o://f4458c08183867e68d981a00f792886a32dfbdc05716b6e94fea396b9dc48aef" gracePeriod=5 Jan 27 07:56:08 crc kubenswrapper[4787]: I0127 07:56:08.132622 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Jan 27 07:56:08 crc kubenswrapper[4787]: I0127 07:56:08.152800 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"trusted-ca" Jan 27 07:56:08 crc kubenswrapper[4787]: I0127 07:56:08.275956 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"audit-1" Jan 27 07:56:08 crc kubenswrapper[4787]: I0127 07:56:08.294229 4787 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-image-registry"/"node-ca-dockercfg-4777p" Jan 27 07:56:08 crc kubenswrapper[4787]: I0127 07:56:08.415088 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Jan 27 07:56:08 crc kubenswrapper[4787]: I0127 07:56:08.450909 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-dockercfg-5nsgg" Jan 27 07:56:08 crc kubenswrapper[4787]: I0127 07:56:08.557462 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-operator-dockercfg-98p87" Jan 27 07:56:08 crc kubenswrapper[4787]: I0127 07:56:08.621184 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"kube-root-ca.crt" Jan 27 07:56:09 crc kubenswrapper[4787]: I0127 07:56:09.005006 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"console-operator-dockercfg-4xjcr" Jan 27 07:56:09 crc kubenswrapper[4787]: I0127 07:56:09.064440 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"openshift-service-ca.crt" Jan 27 07:56:09 crc kubenswrapper[4787]: I0127 07:56:09.281652 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mcc-proxy-tls" Jan 27 07:56:09 crc kubenswrapper[4787]: I0127 07:56:09.286581 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-metrics" Jan 27 07:56:09 crc kubenswrapper[4787]: I0127 07:56:09.314229 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"service-ca-dockercfg-pn86c" Jan 27 07:56:09 crc kubenswrapper[4787]: I0127 07:56:09.320881 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"kube-root-ca.crt" Jan 27 07:56:09 crc kubenswrapper[4787]: I0127 07:56:09.343786 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-root-ca.crt" Jan 27 07:56:09 crc kubenswrapper[4787]: I0127 07:56:09.534954 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"kube-root-ca.crt" Jan 27 07:56:09 crc kubenswrapper[4787]: I0127 07:56:09.646392 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"openshift-service-ca.crt" Jan 27 07:56:09 crc kubenswrapper[4787]: I0127 07:56:09.684915 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-metrics-certs-default" Jan 27 07:56:09 crc kubenswrapper[4787]: I0127 07:56:09.719403 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-sa-dockercfg-nl2j4" Jan 27 07:56:09 crc kubenswrapper[4787]: I0127 07:56:09.796823 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-operator"/"metrics-tls" Jan 27 07:56:09 crc kubenswrapper[4787]: I0127 07:56:09.811891 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"registry-dockercfg-kzzsd" Jan 27 07:56:09 crc kubenswrapper[4787]: I0127 07:56:09.828967 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Jan 27 07:56:09 crc kubenswrapper[4787]: I0127 07:56:09.961797 4787 reflector.go:368] Caches populated for *v1.Secret from 
object-"hostpath-provisioner"/"csi-hostpath-provisioner-sa-dockercfg-qd74k" Jan 27 07:56:09 crc kubenswrapper[4787]: I0127 07:56:09.973810 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"openshift-service-ca.crt" Jan 27 07:56:10 crc kubenswrapper[4787]: I0127 07:56:10.026802 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"multus-daemon-config" Jan 27 07:56:10 crc kubenswrapper[4787]: I0127 07:56:10.048055 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"metrics-tls" Jan 27 07:56:10 crc kubenswrapper[4787]: I0127 07:56:10.157515 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"serving-cert" Jan 27 07:56:10 crc kubenswrapper[4787]: I0127 07:56:10.165384 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"signing-key" Jan 27 07:56:10 crc kubenswrapper[4787]: I0127 07:56:10.221455 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"openshift-service-ca.crt" Jan 27 07:56:10 crc kubenswrapper[4787]: I0127 07:56:10.222930 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator"/"kube-storage-version-migrator-sa-dockercfg-5xfcg" Jan 27 07:56:10 crc kubenswrapper[4787]: I0127 07:56:10.228871 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-control-plane-dockercfg-gs7dd" Jan 27 07:56:10 crc kubenswrapper[4787]: I0127 07:56:10.308875 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"console-config" Jan 27 07:56:10 crc kubenswrapper[4787]: I0127 07:56:10.393183 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"openshift-service-ca.crt" Jan 27 07:56:10 crc kubenswrapper[4787]: I0127 07:56:10.429602 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"openshift-service-ca.crt" Jan 27 07:56:10 crc kubenswrapper[4787]: I0127 07:56:10.512369 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"openshift-service-ca.crt" Jan 27 07:56:10 crc kubenswrapper[4787]: I0127 07:56:10.609038 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"package-server-manager-serving-cert" Jan 27 07:56:10 crc kubenswrapper[4787]: I0127 07:56:10.613855 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data" Jan 27 07:56:10 crc kubenswrapper[4787]: I0127 07:56:10.644978 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"kube-root-ca.crt" Jan 27 07:56:10 crc kubenswrapper[4787]: I0127 07:56:10.669906 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"serving-cert" Jan 27 07:56:10 crc kubenswrapper[4787]: I0127 07:56:10.696626 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"kube-root-ca.crt" Jan 27 07:56:10 crc kubenswrapper[4787]: I0127 07:56:10.807745 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq" Jan 27 07:56:10 crc kubenswrapper[4787]: I0127 07:56:10.857593 4787 reflector.go:368] 
Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"iptables-alerter-script" Jan 27 07:56:10 crc kubenswrapper[4787]: I0127 07:56:10.875701 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-admission-controller-secret" Jan 27 07:56:10 crc kubenswrapper[4787]: I0127 07:56:10.879925 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"serving-cert" Jan 27 07:56:10 crc kubenswrapper[4787]: I0127 07:56:10.916368 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Jan 27 07:56:10 crc kubenswrapper[4787]: I0127 07:56:10.947017 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"cni-copy-resources" Jan 27 07:56:11 crc kubenswrapper[4787]: I0127 07:56:11.081961 4787 reflector.go:368] Caches populated for *v1.Node from k8s.io/client-go/informers/factory.go:160 Jan 27 07:56:11 crc kubenswrapper[4787]: I0127 07:56:11.104008 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-tls" Jan 27 07:56:11 crc kubenswrapper[4787]: I0127 07:56:11.166154 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"trusted-ca" Jan 27 07:56:11 crc kubenswrapper[4787]: I0127 07:56:11.174010 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-root-ca.crt" Jan 27 07:56:11 crc kubenswrapper[4787]: I0127 07:56:11.249701 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"kube-root-ca.crt" Jan 27 07:56:11 crc kubenswrapper[4787]: I0127 07:56:11.300865 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"openshift-service-ca.crt" Jan 27 07:56:11 crc kubenswrapper[4787]: I0127 07:56:11.336430 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"catalog-operator-serving-cert" Jan 27 07:56:11 crc kubenswrapper[4787]: I0127 07:56:11.343265 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"kube-root-ca.crt" Jan 27 07:56:11 crc kubenswrapper[4787]: I0127 07:56:11.349167 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"kube-root-ca.crt" Jan 27 07:56:11 crc kubenswrapper[4787]: I0127 07:56:11.363579 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"openshift-service-ca.crt" Jan 27 07:56:11 crc kubenswrapper[4787]: I0127 07:56:11.412543 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Jan 27 07:56:11 crc kubenswrapper[4787]: I0127 07:56:11.445814 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Jan 27 07:56:11 crc kubenswrapper[4787]: I0127 07:56:11.467859 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"openshift-service-ca.crt" Jan 27 07:56:11 crc kubenswrapper[4787]: I0127 07:56:11.481350 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"openshift-service-ca.crt" Jan 27 07:56:11 crc kubenswrapper[4787]: I0127 07:56:11.559642 4787 reflector.go:368] Caches populated 
for *v1.CSIDriver from k8s.io/client-go/informers/factory.go:160 Jan 27 07:56:11 crc kubenswrapper[4787]: I0127 07:56:11.581372 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"encryption-config-1" Jan 27 07:56:11 crc kubenswrapper[4787]: I0127 07:56:11.758229 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Jan 27 07:56:11 crc kubenswrapper[4787]: I0127 07:56:11.823757 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"service-ca-bundle" Jan 27 07:56:11 crc kubenswrapper[4787]: I0127 07:56:11.885618 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"machine-api-operator-images" Jan 27 07:56:11 crc kubenswrapper[4787]: I0127 07:56:11.892192 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-sa-dockercfg-d427c" Jan 27 07:56:11 crc kubenswrapper[4787]: I0127 07:56:11.893009 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"openshift-service-ca.crt" Jan 27 07:56:11 crc kubenswrapper[4787]: I0127 07:56:11.976272 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"encryption-config-1" Jan 27 07:56:11 crc kubenswrapper[4787]: I0127 07:56:11.989541 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login" Jan 27 07:56:12 crc kubenswrapper[4787]: I0127 07:56:12.000371 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs" Jan 27 07:56:12 crc kubenswrapper[4787]: I0127 07:56:12.002954 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz" Jan 27 07:56:12 crc kubenswrapper[4787]: I0127 07:56:12.076534 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"image-import-ca" Jan 27 07:56:12 crc kubenswrapper[4787]: I0127 07:56:12.091748 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"openshift-apiserver-sa-dockercfg-djjff" Jan 27 07:56:12 crc kubenswrapper[4787]: I0127 07:56:12.091874 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serving-cert" Jan 27 07:56:12 crc kubenswrapper[4787]: I0127 07:56:12.135516 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"openshift-service-ca.crt" Jan 27 07:56:12 crc kubenswrapper[4787]: I0127 07:56:12.333708 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"default-dockercfg-gxtc4" Jan 27 07:56:12 crc kubenswrapper[4787]: I0127 07:56:12.397824 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-operator-config" Jan 27 07:56:12 crc kubenswrapper[4787]: I0127 07:56:12.422460 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"samples-operator-tls" Jan 27 07:56:12 crc kubenswrapper[4787]: I0127 07:56:12.441490 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-client" Jan 27 07:56:12 crc kubenswrapper[4787]: I0127 07:56:12.463356 4787 reflector.go:368] Caches populated for *v1.Service from k8s.io/client-go/informers/factory.go:160 Jan 27 
07:56:12 crc kubenswrapper[4787]: I0127 07:56:12.529708 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Jan 27 07:56:12 crc kubenswrapper[4787]: I0127 07:56:12.541888 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"kube-root-ca.crt" Jan 27 07:56:12 crc kubenswrapper[4787]: I0127 07:56:12.598316 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"kube-root-ca.crt" Jan 27 07:56:12 crc kubenswrapper[4787]: I0127 07:56:12.639471 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-dockercfg-zdk86" Jan 27 07:56:12 crc kubenswrapper[4787]: I0127 07:56:12.663000 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-dockercfg-jwfmh" Jan 27 07:56:12 crc kubenswrapper[4787]: I0127 07:56:12.759279 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"openshift-service-ca.crt" Jan 27 07:56:12 crc kubenswrapper[4787]: I0127 07:56:12.819350 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-tls" Jan 27 07:56:12 crc kubenswrapper[4787]: I0127 07:56:12.882987 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"kube-root-ca.crt" Jan 27 07:56:12 crc kubenswrapper[4787]: I0127 07:56:12.910180 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-config" Jan 27 07:56:12 crc kubenswrapper[4787]: I0127 07:56:12.931833 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"openshift-service-ca.crt" Jan 27 07:56:13 crc kubenswrapper[4787]: I0127 07:56:13.008317 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"openshift-service-ca.crt" Jan 27 07:56:13 crc kubenswrapper[4787]: I0127 07:56:13.113112 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"kube-root-ca.crt" Jan 27 07:56:13 crc kubenswrapper[4787]: I0127 07:56:13.129465 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Jan 27 07:56:13 crc kubenswrapper[4787]: I0127 07:56:13.176623 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"cluster-samples-operator-dockercfg-xpp9w" Jan 27 07:56:13 crc kubenswrapper[4787]: I0127 07:56:13.189749 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-startup-monitor-crc_f85e55b1a89d02b0cb034b1ea31ed45a/startup-monitor/0.log" Jan 27 07:56:13 crc kubenswrapper[4787]: I0127 07:56:13.189835 4787 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 27 07:56:13 crc kubenswrapper[4787]: I0127 07:56:13.296064 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Jan 27 07:56:13 crc kubenswrapper[4787]: I0127 07:56:13.296141 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Jan 27 07:56:13 crc kubenswrapper[4787]: I0127 07:56:13.296208 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock" (OuterVolumeSpecName: "var-lock") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "var-lock". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 27 07:56:13 crc kubenswrapper[4787]: I0127 07:56:13.296396 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log" (OuterVolumeSpecName: "var-log") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "var-log". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 27 07:56:13 crc kubenswrapper[4787]: I0127 07:56:13.297281 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Jan 27 07:56:13 crc kubenswrapper[4787]: I0127 07:56:13.297356 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Jan 27 07:56:13 crc kubenswrapper[4787]: I0127 07:56:13.297461 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Jan 27 07:56:13 crc kubenswrapper[4787]: I0127 07:56:13.297659 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests" (OuterVolumeSpecName: "manifests") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "manifests". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 27 07:56:13 crc kubenswrapper[4787]: I0127 07:56:13.297795 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir" (OuterVolumeSpecName: "resource-dir") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "resource-dir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 27 07:56:13 crc kubenswrapper[4787]: I0127 07:56:13.297891 4787 reconciler_common.go:293] "Volume detached for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") on node \"crc\" DevicePath \"\"" Jan 27 07:56:13 crc kubenswrapper[4787]: I0127 07:56:13.297973 4787 reconciler_common.go:293] "Volume detached for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") on node \"crc\" DevicePath \"\"" Jan 27 07:56:13 crc kubenswrapper[4787]: I0127 07:56:13.310811 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir" (OuterVolumeSpecName: "pod-resource-dir") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "pod-resource-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 27 07:56:13 crc kubenswrapper[4787]: I0127 07:56:13.333810 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt" Jan 27 07:56:13 crc kubenswrapper[4787]: I0127 07:56:13.387645 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-rbac-proxy" Jan 27 07:56:13 crc kubenswrapper[4787]: I0127 07:56:13.399490 4787 reconciler_common.go:293] "Volume detached for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") on node \"crc\" DevicePath \"\"" Jan 27 07:56:13 crc kubenswrapper[4787]: I0127 07:56:13.400373 4787 reconciler_common.go:293] "Volume detached for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") on node \"crc\" DevicePath \"\"" Jan 27 07:56:13 crc kubenswrapper[4787]: I0127 07:56:13.400485 4787 reconciler_common.go:293] "Volume detached for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") on node \"crc\" DevicePath \"\"" Jan 27 07:56:13 crc kubenswrapper[4787]: I0127 07:56:13.412423 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-startup-monitor-crc_f85e55b1a89d02b0cb034b1ea31ed45a/startup-monitor/0.log" Jan 27 07:56:13 crc kubenswrapper[4787]: I0127 07:56:13.412672 4787 generic.go:334] "Generic (PLEG): container finished" podID="f85e55b1a89d02b0cb034b1ea31ed45a" containerID="f4458c08183867e68d981a00f792886a32dfbdc05716b6e94fea396b9dc48aef" exitCode=137 Jan 27 07:56:13 crc kubenswrapper[4787]: I0127 07:56:13.412760 4787 scope.go:117] "RemoveContainer" containerID="f4458c08183867e68d981a00f792886a32dfbdc05716b6e94fea396b9dc48aef" Jan 27 07:56:13 crc kubenswrapper[4787]: I0127 07:56:13.412772 4787 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 27 07:56:13 crc kubenswrapper[4787]: I0127 07:56:13.431695 4787 scope.go:117] "RemoveContainer" containerID="f4458c08183867e68d981a00f792886a32dfbdc05716b6e94fea396b9dc48aef" Jan 27 07:56:13 crc kubenswrapper[4787]: E0127 07:56:13.432345 4787 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f4458c08183867e68d981a00f792886a32dfbdc05716b6e94fea396b9dc48aef\": container with ID starting with f4458c08183867e68d981a00f792886a32dfbdc05716b6e94fea396b9dc48aef not found: ID does not exist" containerID="f4458c08183867e68d981a00f792886a32dfbdc05716b6e94fea396b9dc48aef" Jan 27 07:56:13 crc kubenswrapper[4787]: I0127 07:56:13.432383 4787 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f4458c08183867e68d981a00f792886a32dfbdc05716b6e94fea396b9dc48aef"} err="failed to get container status \"f4458c08183867e68d981a00f792886a32dfbdc05716b6e94fea396b9dc48aef\": rpc error: code = NotFound desc = could not find container \"f4458c08183867e68d981a00f792886a32dfbdc05716b6e94fea396b9dc48aef\": container with ID starting with f4458c08183867e68d981a00f792886a32dfbdc05716b6e94fea396b9dc48aef not found: ID does not exist" Jan 27 07:56:13 crc kubenswrapper[4787]: I0127 07:56:13.518599 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"kube-root-ca.crt" Jan 27 07:56:13 crc kubenswrapper[4787]: I0127 07:56:13.583841 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-tls" Jan 27 07:56:13 crc kubenswrapper[4787]: I0127 07:56:13.610294 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-config" Jan 27 07:56:13 crc kubenswrapper[4787]: I0127 07:56:13.747355 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-root-ca.crt" Jan 27 07:56:13 crc kubenswrapper[4787]: I0127 07:56:13.802831 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-dockercfg-qx5rd" Jan 27 07:56:13 crc kubenswrapper[4787]: I0127 07:56:13.850280 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"marketplace-trusted-ca" Jan 27 07:56:13 crc kubenswrapper[4787]: I0127 07:56:13.882658 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"kube-root-ca.crt" Jan 27 07:56:13 crc kubenswrapper[4787]: I0127 07:56:13.984767 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"service-ca-bundle" Jan 27 07:56:14 crc kubenswrapper[4787]: I0127 07:56:14.000427 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"service-ca-operator-dockercfg-rg9jl" Jan 27 07:56:14 crc kubenswrapper[4787]: I0127 07:56:14.081675 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"etcd-serving-ca" Jan 27 07:56:14 crc kubenswrapper[4787]: I0127 07:56:14.137179 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"kube-root-ca.crt" Jan 27 07:56:14 crc kubenswrapper[4787]: I0127 07:56:14.165834 4787 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-image-registry"/"installation-pull-secrets" Jan 27 07:56:14 crc kubenswrapper[4787]: I0127 07:56:14.198320 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"console-operator-config" Jan 27 07:56:14 crc kubenswrapper[4787]: I0127 07:56:14.227263 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-controller-dockercfg-c2lfx" Jan 27 07:56:14 crc kubenswrapper[4787]: I0127 07:56:14.235845 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-config" Jan 27 07:56:14 crc kubenswrapper[4787]: I0127 07:56:14.287714 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-rbac-proxy" Jan 27 07:56:14 crc kubenswrapper[4787]: I0127 07:56:14.329411 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Jan 27 07:56:14 crc kubenswrapper[4787]: I0127 07:56:14.476791 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"service-ca" Jan 27 07:56:14 crc kubenswrapper[4787]: I0127 07:56:14.510580 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Jan 27 07:56:14 crc kubenswrapper[4787]: I0127 07:56:14.523489 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"kube-storage-version-migrator-operator-dockercfg-2bh8d" Jan 27 07:56:14 crc kubenswrapper[4787]: I0127 07:56:14.677088 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"oauth-apiserver-sa-dockercfg-6r2bq" Jan 27 07:56:14 crc kubenswrapper[4787]: I0127 07:56:14.681224 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-dockercfg-qt55r" Jan 27 07:56:14 crc kubenswrapper[4787]: I0127 07:56:14.709190 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-console"/"networking-console-plugin" Jan 27 07:56:14 crc kubenswrapper[4787]: I0127 07:56:14.787322 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"proxy-tls" Jan 27 07:56:14 crc kubenswrapper[4787]: I0127 07:56:14.793450 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-dockercfg-r9srn" Jan 27 07:56:14 crc kubenswrapper[4787]: I0127 07:56:14.838805 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"authentication-operator-dockercfg-mz9bj" Jan 27 07:56:14 crc kubenswrapper[4787]: I0127 07:56:14.906060 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-serving-cert" Jan 27 07:56:14 crc kubenswrapper[4787]: I0127 07:56:14.933313 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"dns-operator-dockercfg-9mqw5" Jan 27 07:56:14 crc kubenswrapper[4787]: I0127 07:56:14.957813 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-serving-cert" Jan 27 07:56:15 crc kubenswrapper[4787]: I0127 07:56:15.075242 4787 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-service-ca-operator"/"openshift-service-ca.crt" Jan 27 07:56:15 crc kubenswrapper[4787]: I0127 07:56:15.086702 4787 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" path="/var/lib/kubelet/pods/f85e55b1a89d02b0cb034b1ea31ed45a/volumes" Jan 27 07:56:15 crc kubenswrapper[4787]: I0127 07:56:15.109228 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"trusted-ca-bundle" Jan 27 07:56:15 crc kubenswrapper[4787]: I0127 07:56:15.161359 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"service-ca-operator-config" Jan 27 07:56:15 crc kubenswrapper[4787]: I0127 07:56:15.234078 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"openshift-service-ca.crt" Jan 27 07:56:15 crc kubenswrapper[4787]: I0127 07:56:15.286872 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"audit" Jan 27 07:56:15 crc kubenswrapper[4787]: I0127 07:56:15.293413 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-serving-cert" Jan 27 07:56:15 crc kubenswrapper[4787]: I0127 07:56:15.299651 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"openshift-service-ca.crt" Jan 27 07:56:15 crc kubenswrapper[4787]: I0127 07:56:15.312338 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error" Jan 27 07:56:15 crc kubenswrapper[4787]: I0127 07:56:15.339870 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"canary-serving-cert" Jan 27 07:56:15 crc kubenswrapper[4787]: I0127 07:56:15.350481 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-secret" Jan 27 07:56:15 crc kubenswrapper[4787]: I0127 07:56:15.392010 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-console"/"networking-console-plugin-cert" Jan 27 07:56:15 crc kubenswrapper[4787]: I0127 07:56:15.544269 4787 reflector.go:368] Caches populated for *v1.RuntimeClass from k8s.io/client-go/informers/factory.go:160 Jan 27 07:56:15 crc kubenswrapper[4787]: I0127 07:56:15.563102 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"openshift-service-ca.crt" Jan 27 07:56:15 crc kubenswrapper[4787]: I0127 07:56:15.631984 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"default-dockercfg-2llfx" Jan 27 07:56:15 crc kubenswrapper[4787]: I0127 07:56:15.645380 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"default-cni-sysctl-allowlist" Jan 27 07:56:15 crc kubenswrapper[4787]: I0127 07:56:15.671749 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"kube-root-ca.crt" Jan 27 07:56:15 crc kubenswrapper[4787]: I0127 07:56:15.738506 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"kube-root-ca.crt" Jan 27 07:56:15 crc kubenswrapper[4787]: I0127 07:56:15.762265 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ac-dockercfg-9lkdf" Jan 27 07:56:15 crc kubenswrapper[4787]: I0127 07:56:15.841373 4787 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-image-registry"/"image-registry-certificates" Jan 27 07:56:15 crc kubenswrapper[4787]: I0127 07:56:15.849825 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-stats-default" Jan 27 07:56:15 crc kubenswrapper[4787]: I0127 07:56:15.949328 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert" Jan 27 07:56:15 crc kubenswrapper[4787]: I0127 07:56:15.987659 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Jan 27 07:56:15 crc kubenswrapper[4787]: I0127 07:56:15.996794 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"metrics-tls" Jan 27 07:56:16 crc kubenswrapper[4787]: I0127 07:56:16.008638 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-tls" Jan 27 07:56:16 crc kubenswrapper[4787]: I0127 07:56:16.122654 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt" Jan 27 07:56:16 crc kubenswrapper[4787]: I0127 07:56:16.174877 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"env-overrides" Jan 27 07:56:16 crc kubenswrapper[4787]: I0127 07:56:16.221155 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-dockercfg-mfbb7" Jan 27 07:56:16 crc kubenswrapper[4787]: I0127 07:56:16.307602 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"pprof-cert" Jan 27 07:56:16 crc kubenswrapper[4787]: I0127 07:56:16.312423 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-default-metrics-tls" Jan 27 07:56:16 crc kubenswrapper[4787]: I0127 07:56:16.323252 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"openshift-service-ca.crt" Jan 27 07:56:16 crc kubenswrapper[4787]: I0127 07:56:16.325981 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-control-plane-metrics-cert" Jan 27 07:56:16 crc kubenswrapper[4787]: I0127 07:56:16.326054 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"machine-config-operator-images" Jan 27 07:56:16 crc kubenswrapper[4787]: I0127 07:56:16.492215 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-tls" Jan 27 07:56:16 crc kubenswrapper[4787]: I0127 07:56:16.513173 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"config-operator-serving-cert" Jan 27 07:56:16 crc kubenswrapper[4787]: I0127 07:56:16.569643 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert" Jan 27 07:56:16 crc kubenswrapper[4787]: I0127 07:56:16.611362 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt" Jan 27 07:56:16 crc kubenswrapper[4787]: I0127 07:56:16.661956 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"kube-root-ca.crt" Jan 27 07:56:16 crc kubenswrapper[4787]: I0127 07:56:16.821792 4787 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-dockercfg-vw8fw" Jan 27 07:56:16 crc kubenswrapper[4787]: I0127 07:56:16.826330 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"kube-scheduler-operator-serving-cert" Jan 27 07:56:16 crc kubenswrapper[4787]: I0127 07:56:16.835618 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-certs-default" Jan 27 07:56:16 crc kubenswrapper[4787]: I0127 07:56:16.844183 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"signing-cabundle" Jan 27 07:56:16 crc kubenswrapper[4787]: I0127 07:56:16.905538 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"node-resolver-dockercfg-kz9s7" Jan 27 07:56:16 crc kubenswrapper[4787]: I0127 07:56:16.936274 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-root-ca.crt" Jan 27 07:56:16 crc kubenswrapper[4787]: I0127 07:56:16.970208 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"kube-root-ca.crt" Jan 27 07:56:16 crc kubenswrapper[4787]: I0127 07:56:16.993426 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-operator-tls" Jan 27 07:56:17 crc kubenswrapper[4787]: I0127 07:56:17.001744 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"node-bootstrapper-token" Jan 27 07:56:17 crc kubenswrapper[4787]: I0127 07:56:17.103306 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"kube-root-ca.crt" Jan 27 07:56:17 crc kubenswrapper[4787]: I0127 07:56:17.172660 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-service-ca.crt" Jan 27 07:56:17 crc kubenswrapper[4787]: I0127 07:56:17.289177 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"serving-cert" Jan 27 07:56:17 crc kubenswrapper[4787]: I0127 07:56:17.360054 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"kube-root-ca.crt" Jan 27 07:56:17 crc kubenswrapper[4787]: I0127 07:56:17.360383 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"etcd-client" Jan 27 07:56:17 crc kubenswrapper[4787]: I0127 07:56:17.504888 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"kube-root-ca.crt" Jan 27 07:56:17 crc kubenswrapper[4787]: I0127 07:56:17.573764 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"openshift-service-ca.crt" Jan 27 07:56:17 crc kubenswrapper[4787]: I0127 07:56:17.696813 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"openshift-service-ca.crt" Jan 27 07:56:17 crc kubenswrapper[4787]: I0127 07:56:17.798474 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"kube-root-ca.crt" Jan 27 07:56:17 crc kubenswrapper[4787]: I0127 07:56:17.934426 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"kube-root-ca.crt" Jan 27 07:56:17 crc kubenswrapper[4787]: I0127 07:56:17.935184 4787 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl" Jan 27 07:56:17 crc kubenswrapper[4787]: I0127 07:56:17.956907 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"config" Jan 27 07:56:17 crc kubenswrapper[4787]: I0127 07:56:17.991469 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session" Jan 27 07:56:18 crc kubenswrapper[4787]: I0127 07:56:18.057178 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"serving-cert" Jan 27 07:56:18 crc kubenswrapper[4787]: I0127 07:56:18.125098 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Jan 27 07:56:18 crc kubenswrapper[4787]: I0127 07:56:18.146766 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-root-ca.crt" Jan 27 07:56:18 crc kubenswrapper[4787]: I0127 07:56:18.150764 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"kube-root-ca.crt" Jan 27 07:56:18 crc kubenswrapper[4787]: I0127 07:56:18.187975 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-config" Jan 27 07:56:18 crc kubenswrapper[4787]: I0127 07:56:18.199559 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Jan 27 07:56:18 crc kubenswrapper[4787]: I0127 07:56:18.206465 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"cluster-version-operator-serving-cert" Jan 27 07:56:18 crc kubenswrapper[4787]: I0127 07:56:18.261992 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"default-dockercfg-chnjx" Jan 27 07:56:18 crc kubenswrapper[4787]: I0127 07:56:18.506639 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"trusted-ca-bundle" Jan 27 07:56:18 crc kubenswrapper[4787]: I0127 07:56:18.617496 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig" Jan 27 07:56:18 crc kubenswrapper[4787]: I0127 07:56:18.684998 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"config" Jan 27 07:56:18 crc kubenswrapper[4787]: I0127 07:56:18.750674 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"kube-root-ca.crt" Jan 27 07:56:18 crc kubenswrapper[4787]: I0127 07:56:18.756884 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-config" Jan 27 07:56:18 crc kubenswrapper[4787]: I0127 07:56:18.844270 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-dockercfg-x57mr" Jan 27 07:56:18 crc kubenswrapper[4787]: I0127 07:56:18.861823 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"kube-root-ca.crt" Jan 27 07:56:18 crc kubenswrapper[4787]: I0127 07:56:18.862927 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"env-overrides" Jan 27 07:56:18 crc kubenswrapper[4787]: I0127 07:56:18.864411 4787 reflector.go:368] Caches populated for 
*v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-config" Jan 27 07:56:18 crc kubenswrapper[4787]: I0127 07:56:18.869608 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"serving-cert" Jan 27 07:56:18 crc kubenswrapper[4787]: I0127 07:56:18.903616 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"trusted-ca-bundle" Jan 27 07:56:18 crc kubenswrapper[4787]: I0127 07:56:18.957490 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"cluster-image-registry-operator-dockercfg-m4qtx" Jan 27 07:56:18 crc kubenswrapper[4787]: I0127 07:56:18.981794 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Jan 27 07:56:18 crc kubenswrapper[4787]: I0127 07:56:18.984513 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"openshift-config-operator-dockercfg-7pc5z" Jan 27 07:56:19 crc kubenswrapper[4787]: I0127 07:56:19.050685 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template" Jan 27 07:56:19 crc kubenswrapper[4787]: I0127 07:56:19.077624 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Jan 27 07:56:19 crc kubenswrapper[4787]: I0127 07:56:19.203543 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-rbac-proxy" Jan 27 07:56:19 crc kubenswrapper[4787]: I0127 07:56:19.225643 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"kube-root-ca.crt" Jan 27 07:56:19 crc kubenswrapper[4787]: I0127 07:56:19.238406 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"authentication-operator-config" Jan 27 07:56:19 crc kubenswrapper[4787]: I0127 07:56:19.281378 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"openshift-service-ca.crt" Jan 27 07:56:19 crc kubenswrapper[4787]: I0127 07:56:19.302791 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle" Jan 27 07:56:19 crc kubenswrapper[4787]: I0127 07:56:19.383695 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mco-proxy-tls" Jan 27 07:56:19 crc kubenswrapper[4787]: I0127 07:56:19.469480 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-serving-cert" Jan 27 07:56:19 crc kubenswrapper[4787]: I0127 07:56:19.480627 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-ca-bundle" Jan 27 07:56:19 crc kubenswrapper[4787]: I0127 07:56:19.592490 4787 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-npc57"] Jan 27 07:56:19 crc kubenswrapper[4787]: I0127 07:56:19.592959 4787 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-npc57" podUID="7af45749-cd7f-490a-b2f6-bf979ea6467e" containerName="registry-server" containerID="cri-o://3f0dd71506f96b5f1d29b34337fb17e1db8a01c5d142ebe84263d8b6c18e1d79" gracePeriod=30 Jan 27 07:56:19 crc kubenswrapper[4787]: I0127 
07:56:19.611150 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-dockercfg-k9rxt" Jan 27 07:56:19 crc kubenswrapper[4787]: I0127 07:56:19.611610 4787 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-tnxzl"] Jan 27 07:56:19 crc kubenswrapper[4787]: I0127 07:56:19.611914 4787 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-tnxzl" podUID="44cc5af2-0636-4589-a988-e7e32bfea075" containerName="registry-server" containerID="cri-o://e3cf97ca374aafda820a97034b2b6a3f6e20f34e82f747d6f2cfb26dfc59a223" gracePeriod=30 Jan 27 07:56:19 crc kubenswrapper[4787]: I0127 07:56:19.616745 4787 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-h5d4g"] Jan 27 07:56:19 crc kubenswrapper[4787]: I0127 07:56:19.616974 4787 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/marketplace-operator-79b997595-h5d4g" podUID="2e88d48e-6302-4a84-90a7-69446be90e4a" containerName="marketplace-operator" containerID="cri-o://380d74a932668a4ea80abdd7fc668dad563b863ffa1063b090e363f00e977eef" gracePeriod=30 Jan 27 07:56:19 crc kubenswrapper[4787]: I0127 07:56:19.618223 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca" Jan 27 07:56:19 crc kubenswrapper[4787]: I0127 07:56:19.633255 4787 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-ht6h9"] Jan 27 07:56:19 crc kubenswrapper[4787]: I0127 07:56:19.633896 4787 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-ht6h9" podUID="624e9a13-c9c5-4ef3-8628-056bfc65338b" containerName="registry-server" containerID="cri-o://c1c232a68764a7e2a2ab4410f7d9d45a3f03c3775264008e4c5ac89fab212864" gracePeriod=30 Jan 27 07:56:19 crc kubenswrapper[4787]: I0127 07:56:19.645592 4787 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-kkjnl"] Jan 27 07:56:19 crc kubenswrapper[4787]: I0127 07:56:19.646067 4787 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-kkjnl" podUID="1e42281e-20ed-4105-866f-878ffbf6c6eb" containerName="registry-server" containerID="cri-o://8cb3ae0fcd39b44a275d8cd5c91a52e737b682bd55f4da55edd1a06941315d84" gracePeriod=30 Jan 27 07:56:19 crc kubenswrapper[4787]: I0127 07:56:19.669210 4787 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-fkd29"] Jan 27 07:56:19 crc kubenswrapper[4787]: E0127 07:56:19.670379 4787 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" Jan 27 07:56:19 crc kubenswrapper[4787]: I0127 07:56:19.670407 4787 state_mem.go:107] "Deleted CPUSet assignment" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" Jan 27 07:56:19 crc kubenswrapper[4787]: E0127 07:56:19.670431 4787 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d78a135f-a7de-4bb7-b2aa-168fb353658f" containerName="installer" Jan 27 07:56:19 crc kubenswrapper[4787]: I0127 07:56:19.670440 4787 state_mem.go:107] "Deleted CPUSet assignment" podUID="d78a135f-a7de-4bb7-b2aa-168fb353658f" containerName="installer" Jan 27 07:56:19 crc kubenswrapper[4787]: I0127 
07:56:19.670588 4787 memory_manager.go:354] "RemoveStaleState removing state" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" Jan 27 07:56:19 crc kubenswrapper[4787]: I0127 07:56:19.670601 4787 memory_manager.go:354] "RemoveStaleState removing state" podUID="d78a135f-a7de-4bb7-b2aa-168fb353658f" containerName="installer" Jan 27 07:56:19 crc kubenswrapper[4787]: I0127 07:56:19.671393 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-fkd29" Jan 27 07:56:19 crc kubenswrapper[4787]: I0127 07:56:19.681282 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-fkd29"] Jan 27 07:56:19 crc kubenswrapper[4787]: I0127 07:56:19.706419 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"ovnkube-identity-cm" Jan 27 07:56:19 crc kubenswrapper[4787]: I0127 07:56:19.799399 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/39675fef-cac4-48c9-bd77-e0ee695a5ab8-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-fkd29\" (UID: \"39675fef-cac4-48c9-bd77-e0ee695a5ab8\") " pod="openshift-marketplace/marketplace-operator-79b997595-fkd29" Jan 27 07:56:19 crc kubenswrapper[4787]: I0127 07:56:19.799516 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d5hm4\" (UniqueName: \"kubernetes.io/projected/39675fef-cac4-48c9-bd77-e0ee695a5ab8-kube-api-access-d5hm4\") pod \"marketplace-operator-79b997595-fkd29\" (UID: \"39675fef-cac4-48c9-bd77-e0ee695a5ab8\") " pod="openshift-marketplace/marketplace-operator-79b997595-fkd29" Jan 27 07:56:19 crc kubenswrapper[4787]: I0127 07:56:19.799613 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/39675fef-cac4-48c9-bd77-e0ee695a5ab8-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-fkd29\" (UID: \"39675fef-cac4-48c9-bd77-e0ee695a5ab8\") " pod="openshift-marketplace/marketplace-operator-79b997595-fkd29" Jan 27 07:56:19 crc kubenswrapper[4787]: I0127 07:56:19.901344 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/39675fef-cac4-48c9-bd77-e0ee695a5ab8-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-fkd29\" (UID: \"39675fef-cac4-48c9-bd77-e0ee695a5ab8\") " pod="openshift-marketplace/marketplace-operator-79b997595-fkd29" Jan 27 07:56:19 crc kubenswrapper[4787]: I0127 07:56:19.901438 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/39675fef-cac4-48c9-bd77-e0ee695a5ab8-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-fkd29\" (UID: \"39675fef-cac4-48c9-bd77-e0ee695a5ab8\") " pod="openshift-marketplace/marketplace-operator-79b997595-fkd29" Jan 27 07:56:19 crc kubenswrapper[4787]: I0127 07:56:19.901480 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d5hm4\" (UniqueName: \"kubernetes.io/projected/39675fef-cac4-48c9-bd77-e0ee695a5ab8-kube-api-access-d5hm4\") pod \"marketplace-operator-79b997595-fkd29\" (UID: \"39675fef-cac4-48c9-bd77-e0ee695a5ab8\") " 
pod="openshift-marketplace/marketplace-operator-79b997595-fkd29" Jan 27 07:56:19 crc kubenswrapper[4787]: I0127 07:56:19.904000 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/39675fef-cac4-48c9-bd77-e0ee695a5ab8-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-fkd29\" (UID: \"39675fef-cac4-48c9-bd77-e0ee695a5ab8\") " pod="openshift-marketplace/marketplace-operator-79b997595-fkd29" Jan 27 07:56:19 crc kubenswrapper[4787]: I0127 07:56:19.921397 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/39675fef-cac4-48c9-bd77-e0ee695a5ab8-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-fkd29\" (UID: \"39675fef-cac4-48c9-bd77-e0ee695a5ab8\") " pod="openshift-marketplace/marketplace-operator-79b997595-fkd29" Jan 27 07:56:19 crc kubenswrapper[4787]: I0127 07:56:19.926569 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d5hm4\" (UniqueName: \"kubernetes.io/projected/39675fef-cac4-48c9-bd77-e0ee695a5ab8-kube-api-access-d5hm4\") pod \"marketplace-operator-79b997595-fkd29\" (UID: \"39675fef-cac4-48c9-bd77-e0ee695a5ab8\") " pod="openshift-marketplace/marketplace-operator-79b997595-fkd29" Jan 27 07:56:19 crc kubenswrapper[4787]: I0127 07:56:19.967687 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"etcd-client" Jan 27 07:56:19 crc kubenswrapper[4787]: I0127 07:56:19.993430 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-script-lib" Jan 27 07:56:20 crc kubenswrapper[4787]: I0127 07:56:20.037620 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-dockercfg-xtcjv" Jan 27 07:56:20 crc kubenswrapper[4787]: I0127 07:56:20.104529 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-fkd29" Jan 27 07:56:20 crc kubenswrapper[4787]: I0127 07:56:20.154390 4787 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-tnxzl" Jan 27 07:56:20 crc kubenswrapper[4787]: I0127 07:56:20.171040 4787 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-h5d4g" Jan 27 07:56:20 crc kubenswrapper[4787]: I0127 07:56:20.171241 4787 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-ht6h9" Jan 27 07:56:20 crc kubenswrapper[4787]: I0127 07:56:20.187801 4787 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-npc57" Jan 27 07:56:20 crc kubenswrapper[4787]: I0127 07:56:20.206133 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/44cc5af2-0636-4589-a988-e7e32bfea075-utilities\") pod \"44cc5af2-0636-4589-a988-e7e32bfea075\" (UID: \"44cc5af2-0636-4589-a988-e7e32bfea075\") " Jan 27 07:56:20 crc kubenswrapper[4787]: I0127 07:56:20.206211 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/2e88d48e-6302-4a84-90a7-69446be90e4a-marketplace-trusted-ca\") pod \"2e88d48e-6302-4a84-90a7-69446be90e4a\" (UID: \"2e88d48e-6302-4a84-90a7-69446be90e4a\") " Jan 27 07:56:20 crc kubenswrapper[4787]: I0127 07:56:20.206240 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/624e9a13-c9c5-4ef3-8628-056bfc65338b-utilities\") pod \"624e9a13-c9c5-4ef3-8628-056bfc65338b\" (UID: \"624e9a13-c9c5-4ef3-8628-056bfc65338b\") " Jan 27 07:56:20 crc kubenswrapper[4787]: I0127 07:56:20.206272 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5tmpp\" (UniqueName: \"kubernetes.io/projected/44cc5af2-0636-4589-a988-e7e32bfea075-kube-api-access-5tmpp\") pod \"44cc5af2-0636-4589-a988-e7e32bfea075\" (UID: \"44cc5af2-0636-4589-a988-e7e32bfea075\") " Jan 27 07:56:20 crc kubenswrapper[4787]: I0127 07:56:20.206304 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nkvcd\" (UniqueName: \"kubernetes.io/projected/624e9a13-c9c5-4ef3-8628-056bfc65338b-kube-api-access-nkvcd\") pod \"624e9a13-c9c5-4ef3-8628-056bfc65338b\" (UID: \"624e9a13-c9c5-4ef3-8628-056bfc65338b\") " Jan 27 07:56:20 crc kubenswrapper[4787]: I0127 07:56:20.206363 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/2e88d48e-6302-4a84-90a7-69446be90e4a-marketplace-operator-metrics\") pod \"2e88d48e-6302-4a84-90a7-69446be90e4a\" (UID: \"2e88d48e-6302-4a84-90a7-69446be90e4a\") " Jan 27 07:56:20 crc kubenswrapper[4787]: I0127 07:56:20.206437 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/44cc5af2-0636-4589-a988-e7e32bfea075-catalog-content\") pod \"44cc5af2-0636-4589-a988-e7e32bfea075\" (UID: \"44cc5af2-0636-4589-a988-e7e32bfea075\") " Jan 27 07:56:20 crc kubenswrapper[4787]: I0127 07:56:20.206490 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/624e9a13-c9c5-4ef3-8628-056bfc65338b-catalog-content\") pod \"624e9a13-c9c5-4ef3-8628-056bfc65338b\" (UID: \"624e9a13-c9c5-4ef3-8628-056bfc65338b\") " Jan 27 07:56:20 crc kubenswrapper[4787]: I0127 07:56:20.206591 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nl2ds\" (UniqueName: \"kubernetes.io/projected/2e88d48e-6302-4a84-90a7-69446be90e4a-kube-api-access-nl2ds\") pod \"2e88d48e-6302-4a84-90a7-69446be90e4a\" (UID: \"2e88d48e-6302-4a84-90a7-69446be90e4a\") " Jan 27 07:56:20 crc kubenswrapper[4787]: I0127 07:56:20.212514 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/projected/2e88d48e-6302-4a84-90a7-69446be90e4a-kube-api-access-nl2ds" (OuterVolumeSpecName: "kube-api-access-nl2ds") pod "2e88d48e-6302-4a84-90a7-69446be90e4a" (UID: "2e88d48e-6302-4a84-90a7-69446be90e4a"). InnerVolumeSpecName "kube-api-access-nl2ds". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 27 07:56:20 crc kubenswrapper[4787]: I0127 07:56:20.213653 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/624e9a13-c9c5-4ef3-8628-056bfc65338b-utilities" (OuterVolumeSpecName: "utilities") pod "624e9a13-c9c5-4ef3-8628-056bfc65338b" (UID: "624e9a13-c9c5-4ef3-8628-056bfc65338b"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 27 07:56:20 crc kubenswrapper[4787]: I0127 07:56:20.214486 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2e88d48e-6302-4a84-90a7-69446be90e4a-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "2e88d48e-6302-4a84-90a7-69446be90e4a" (UID: "2e88d48e-6302-4a84-90a7-69446be90e4a"). InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 27 07:56:20 crc kubenswrapper[4787]: I0127 07:56:20.214793 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/44cc5af2-0636-4589-a988-e7e32bfea075-utilities" (OuterVolumeSpecName: "utilities") pod "44cc5af2-0636-4589-a988-e7e32bfea075" (UID: "44cc5af2-0636-4589-a988-e7e32bfea075"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 27 07:56:20 crc kubenswrapper[4787]: I0127 07:56:20.219060 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/624e9a13-c9c5-4ef3-8628-056bfc65338b-kube-api-access-nkvcd" (OuterVolumeSpecName: "kube-api-access-nkvcd") pod "624e9a13-c9c5-4ef3-8628-056bfc65338b" (UID: "624e9a13-c9c5-4ef3-8628-056bfc65338b"). InnerVolumeSpecName "kube-api-access-nkvcd". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 27 07:56:20 crc kubenswrapper[4787]: I0127 07:56:20.219989 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2e88d48e-6302-4a84-90a7-69446be90e4a-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "2e88d48e-6302-4a84-90a7-69446be90e4a" (UID: "2e88d48e-6302-4a84-90a7-69446be90e4a"). InnerVolumeSpecName "marketplace-operator-metrics". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 27 07:56:20 crc kubenswrapper[4787]: I0127 07:56:20.224087 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/44cc5af2-0636-4589-a988-e7e32bfea075-kube-api-access-5tmpp" (OuterVolumeSpecName: "kube-api-access-5tmpp") pod "44cc5af2-0636-4589-a988-e7e32bfea075" (UID: "44cc5af2-0636-4589-a988-e7e32bfea075"). InnerVolumeSpecName "kube-api-access-5tmpp". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 27 07:56:20 crc kubenswrapper[4787]: I0127 07:56:20.269196 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/624e9a13-c9c5-4ef3-8628-056bfc65338b-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "624e9a13-c9c5-4ef3-8628-056bfc65338b" (UID: "624e9a13-c9c5-4ef3-8628-056bfc65338b"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 27 07:56:20 crc kubenswrapper[4787]: I0127 07:56:20.277210 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/44cc5af2-0636-4589-a988-e7e32bfea075-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "44cc5af2-0636-4589-a988-e7e32bfea075" (UID: "44cc5af2-0636-4589-a988-e7e32bfea075"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 27 07:56:20 crc kubenswrapper[4787]: I0127 07:56:20.301415 4787 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-kkjnl" Jan 27 07:56:20 crc kubenswrapper[4787]: I0127 07:56:20.308045 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7af45749-cd7f-490a-b2f6-bf979ea6467e-utilities\") pod \"7af45749-cd7f-490a-b2f6-bf979ea6467e\" (UID: \"7af45749-cd7f-490a-b2f6-bf979ea6467e\") " Jan 27 07:56:20 crc kubenswrapper[4787]: I0127 07:56:20.308282 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7af45749-cd7f-490a-b2f6-bf979ea6467e-catalog-content\") pod \"7af45749-cd7f-490a-b2f6-bf979ea6467e\" (UID: \"7af45749-cd7f-490a-b2f6-bf979ea6467e\") " Jan 27 07:56:20 crc kubenswrapper[4787]: I0127 07:56:20.308327 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wz5bs\" (UniqueName: \"kubernetes.io/projected/7af45749-cd7f-490a-b2f6-bf979ea6467e-kube-api-access-wz5bs\") pod \"7af45749-cd7f-490a-b2f6-bf979ea6467e\" (UID: \"7af45749-cd7f-490a-b2f6-bf979ea6467e\") " Jan 27 07:56:20 crc kubenswrapper[4787]: I0127 07:56:20.308576 4787 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nl2ds\" (UniqueName: \"kubernetes.io/projected/2e88d48e-6302-4a84-90a7-69446be90e4a-kube-api-access-nl2ds\") on node \"crc\" DevicePath \"\"" Jan 27 07:56:20 crc kubenswrapper[4787]: I0127 07:56:20.308596 4787 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/44cc5af2-0636-4589-a988-e7e32bfea075-utilities\") on node \"crc\" DevicePath \"\"" Jan 27 07:56:20 crc kubenswrapper[4787]: I0127 07:56:20.308606 4787 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/2e88d48e-6302-4a84-90a7-69446be90e4a-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Jan 27 07:56:20 crc kubenswrapper[4787]: I0127 07:56:20.308616 4787 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/624e9a13-c9c5-4ef3-8628-056bfc65338b-utilities\") on node \"crc\" DevicePath \"\"" Jan 27 07:56:20 crc kubenswrapper[4787]: I0127 07:56:20.308626 4787 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5tmpp\" (UniqueName: \"kubernetes.io/projected/44cc5af2-0636-4589-a988-e7e32bfea075-kube-api-access-5tmpp\") on node \"crc\" DevicePath \"\"" Jan 27 07:56:20 crc kubenswrapper[4787]: I0127 07:56:20.308636 4787 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nkvcd\" (UniqueName: \"kubernetes.io/projected/624e9a13-c9c5-4ef3-8628-056bfc65338b-kube-api-access-nkvcd\") on node \"crc\" DevicePath \"\"" Jan 27 07:56:20 crc kubenswrapper[4787]: I0127 07:56:20.308649 4787 reconciler_common.go:293] "Volume detached for volume 
\"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/2e88d48e-6302-4a84-90a7-69446be90e4a-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Jan 27 07:56:20 crc kubenswrapper[4787]: I0127 07:56:20.308658 4787 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/44cc5af2-0636-4589-a988-e7e32bfea075-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 27 07:56:20 crc kubenswrapper[4787]: I0127 07:56:20.308669 4787 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/624e9a13-c9c5-4ef3-8628-056bfc65338b-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 27 07:56:20 crc kubenswrapper[4787]: I0127 07:56:20.310390 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7af45749-cd7f-490a-b2f6-bf979ea6467e-utilities" (OuterVolumeSpecName: "utilities") pod "7af45749-cd7f-490a-b2f6-bf979ea6467e" (UID: "7af45749-cd7f-490a-b2f6-bf979ea6467e"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 27 07:56:20 crc kubenswrapper[4787]: I0127 07:56:20.314385 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7af45749-cd7f-490a-b2f6-bf979ea6467e-kube-api-access-wz5bs" (OuterVolumeSpecName: "kube-api-access-wz5bs") pod "7af45749-cd7f-490a-b2f6-bf979ea6467e" (UID: "7af45749-cd7f-490a-b2f6-bf979ea6467e"). InnerVolumeSpecName "kube-api-access-wz5bs". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 27 07:56:20 crc kubenswrapper[4787]: I0127 07:56:20.318917 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-oauth-config" Jan 27 07:56:20 crc kubenswrapper[4787]: I0127 07:56:20.397938 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"openshift-service-ca.crt" Jan 27 07:56:20 crc kubenswrapper[4787]: I0127 07:56:20.409283 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-76lt5\" (UniqueName: \"kubernetes.io/projected/1e42281e-20ed-4105-866f-878ffbf6c6eb-kube-api-access-76lt5\") pod \"1e42281e-20ed-4105-866f-878ffbf6c6eb\" (UID: \"1e42281e-20ed-4105-866f-878ffbf6c6eb\") " Jan 27 07:56:20 crc kubenswrapper[4787]: I0127 07:56:20.409341 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1e42281e-20ed-4105-866f-878ffbf6c6eb-utilities\") pod \"1e42281e-20ed-4105-866f-878ffbf6c6eb\" (UID: \"1e42281e-20ed-4105-866f-878ffbf6c6eb\") " Jan 27 07:56:20 crc kubenswrapper[4787]: I0127 07:56:20.409527 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1e42281e-20ed-4105-866f-878ffbf6c6eb-catalog-content\") pod \"1e42281e-20ed-4105-866f-878ffbf6c6eb\" (UID: \"1e42281e-20ed-4105-866f-878ffbf6c6eb\") " Jan 27 07:56:20 crc kubenswrapper[4787]: I0127 07:56:20.409773 4787 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wz5bs\" (UniqueName: \"kubernetes.io/projected/7af45749-cd7f-490a-b2f6-bf979ea6467e-kube-api-access-wz5bs\") on node \"crc\" DevicePath \"\"" Jan 27 07:56:20 crc kubenswrapper[4787]: I0127 07:56:20.409788 4787 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7af45749-cd7f-490a-b2f6-bf979ea6467e-utilities\") on node \"crc\" 
DevicePath \"\"" Jan 27 07:56:20 crc kubenswrapper[4787]: I0127 07:56:20.411924 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1e42281e-20ed-4105-866f-878ffbf6c6eb-utilities" (OuterVolumeSpecName: "utilities") pod "1e42281e-20ed-4105-866f-878ffbf6c6eb" (UID: "1e42281e-20ed-4105-866f-878ffbf6c6eb"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 27 07:56:20 crc kubenswrapper[4787]: I0127 07:56:20.412831 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1e42281e-20ed-4105-866f-878ffbf6c6eb-kube-api-access-76lt5" (OuterVolumeSpecName: "kube-api-access-76lt5") pod "1e42281e-20ed-4105-866f-878ffbf6c6eb" (UID: "1e42281e-20ed-4105-866f-878ffbf6c6eb"). InnerVolumeSpecName "kube-api-access-76lt5". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 27 07:56:20 crc kubenswrapper[4787]: I0127 07:56:20.420974 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7af45749-cd7f-490a-b2f6-bf979ea6467e-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "7af45749-cd7f-490a-b2f6-bf979ea6467e" (UID: "7af45749-cd7f-490a-b2f6-bf979ea6467e"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 27 07:56:20 crc kubenswrapper[4787]: I0127 07:56:20.427106 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"openshift-service-ca.crt" Jan 27 07:56:20 crc kubenswrapper[4787]: I0127 07:56:20.459130 4787 generic.go:334] "Generic (PLEG): container finished" podID="2e88d48e-6302-4a84-90a7-69446be90e4a" containerID="380d74a932668a4ea80abdd7fc668dad563b863ffa1063b090e363f00e977eef" exitCode=0 Jan 27 07:56:20 crc kubenswrapper[4787]: I0127 07:56:20.459210 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-h5d4g" event={"ID":"2e88d48e-6302-4a84-90a7-69446be90e4a","Type":"ContainerDied","Data":"380d74a932668a4ea80abdd7fc668dad563b863ffa1063b090e363f00e977eef"} Jan 27 07:56:20 crc kubenswrapper[4787]: I0127 07:56:20.459249 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-h5d4g" event={"ID":"2e88d48e-6302-4a84-90a7-69446be90e4a","Type":"ContainerDied","Data":"080593f490256d1b9111cdbbbfb3bf578e73f07bd416e1297c741853a022c9d0"} Jan 27 07:56:20 crc kubenswrapper[4787]: I0127 07:56:20.459271 4787 scope.go:117] "RemoveContainer" containerID="380d74a932668a4ea80abdd7fc668dad563b863ffa1063b090e363f00e977eef" Jan 27 07:56:20 crc kubenswrapper[4787]: I0127 07:56:20.459410 4787 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-h5d4g" Jan 27 07:56:20 crc kubenswrapper[4787]: I0127 07:56:20.467762 4787 generic.go:334] "Generic (PLEG): container finished" podID="624e9a13-c9c5-4ef3-8628-056bfc65338b" containerID="c1c232a68764a7e2a2ab4410f7d9d45a3f03c3775264008e4c5ac89fab212864" exitCode=0 Jan 27 07:56:20 crc kubenswrapper[4787]: I0127 07:56:20.467870 4787 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-ht6h9" Jan 27 07:56:20 crc kubenswrapper[4787]: I0127 07:56:20.467882 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-ht6h9" event={"ID":"624e9a13-c9c5-4ef3-8628-056bfc65338b","Type":"ContainerDied","Data":"c1c232a68764a7e2a2ab4410f7d9d45a3f03c3775264008e4c5ac89fab212864"} Jan 27 07:56:20 crc kubenswrapper[4787]: I0127 07:56:20.467953 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-ht6h9" event={"ID":"624e9a13-c9c5-4ef3-8628-056bfc65338b","Type":"ContainerDied","Data":"c2980947d8538d4e3d4cb9e62697c884d7b7e8b6d4185152424643323c016212"} Jan 27 07:56:20 crc kubenswrapper[4787]: I0127 07:56:20.476352 4787 generic.go:334] "Generic (PLEG): container finished" podID="44cc5af2-0636-4589-a988-e7e32bfea075" containerID="e3cf97ca374aafda820a97034b2b6a3f6e20f34e82f747d6f2cfb26dfc59a223" exitCode=0 Jan 27 07:56:20 crc kubenswrapper[4787]: I0127 07:56:20.476477 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-tnxzl" event={"ID":"44cc5af2-0636-4589-a988-e7e32bfea075","Type":"ContainerDied","Data":"e3cf97ca374aafda820a97034b2b6a3f6e20f34e82f747d6f2cfb26dfc59a223"} Jan 27 07:56:20 crc kubenswrapper[4787]: I0127 07:56:20.476536 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-tnxzl" event={"ID":"44cc5af2-0636-4589-a988-e7e32bfea075","Type":"ContainerDied","Data":"81b4d3c6ee8e93f24d5baa8dea52cfb0d85fded6205579eedde915fa9ac03cb2"} Jan 27 07:56:20 crc kubenswrapper[4787]: I0127 07:56:20.476590 4787 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-tnxzl" Jan 27 07:56:20 crc kubenswrapper[4787]: I0127 07:56:20.480420 4787 generic.go:334] "Generic (PLEG): container finished" podID="7af45749-cd7f-490a-b2f6-bf979ea6467e" containerID="3f0dd71506f96b5f1d29b34337fb17e1db8a01c5d142ebe84263d8b6c18e1d79" exitCode=0 Jan 27 07:56:20 crc kubenswrapper[4787]: I0127 07:56:20.480627 4787 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-npc57" Jan 27 07:56:20 crc kubenswrapper[4787]: I0127 07:56:20.480787 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-npc57" event={"ID":"7af45749-cd7f-490a-b2f6-bf979ea6467e","Type":"ContainerDied","Data":"3f0dd71506f96b5f1d29b34337fb17e1db8a01c5d142ebe84263d8b6c18e1d79"} Jan 27 07:56:20 crc kubenswrapper[4787]: I0127 07:56:20.480835 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-npc57" event={"ID":"7af45749-cd7f-490a-b2f6-bf979ea6467e","Type":"ContainerDied","Data":"d4eb89ad2e8044f8b7ec7f7b1e0a8d0bd6b479506668e725c471bfff9235d2ce"} Jan 27 07:56:20 crc kubenswrapper[4787]: I0127 07:56:20.480965 4787 scope.go:117] "RemoveContainer" containerID="380d74a932668a4ea80abdd7fc668dad563b863ffa1063b090e363f00e977eef" Jan 27 07:56:20 crc kubenswrapper[4787]: E0127 07:56:20.481539 4787 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"380d74a932668a4ea80abdd7fc668dad563b863ffa1063b090e363f00e977eef\": container with ID starting with 380d74a932668a4ea80abdd7fc668dad563b863ffa1063b090e363f00e977eef not found: ID does not exist" containerID="380d74a932668a4ea80abdd7fc668dad563b863ffa1063b090e363f00e977eef" Jan 27 07:56:20 crc kubenswrapper[4787]: I0127 07:56:20.481599 4787 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"380d74a932668a4ea80abdd7fc668dad563b863ffa1063b090e363f00e977eef"} err="failed to get container status \"380d74a932668a4ea80abdd7fc668dad563b863ffa1063b090e363f00e977eef\": rpc error: code = NotFound desc = could not find container \"380d74a932668a4ea80abdd7fc668dad563b863ffa1063b090e363f00e977eef\": container with ID starting with 380d74a932668a4ea80abdd7fc668dad563b863ffa1063b090e363f00e977eef not found: ID does not exist" Jan 27 07:56:20 crc kubenswrapper[4787]: I0127 07:56:20.481627 4787 scope.go:117] "RemoveContainer" containerID="c1c232a68764a7e2a2ab4410f7d9d45a3f03c3775264008e4c5ac89fab212864" Jan 27 07:56:20 crc kubenswrapper[4787]: I0127 07:56:20.485924 4787 generic.go:334] "Generic (PLEG): container finished" podID="1e42281e-20ed-4105-866f-878ffbf6c6eb" containerID="8cb3ae0fcd39b44a275d8cd5c91a52e737b682bd55f4da55edd1a06941315d84" exitCode=0 Jan 27 07:56:20 crc kubenswrapper[4787]: I0127 07:56:20.485991 4787 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-kkjnl" Jan 27 07:56:20 crc kubenswrapper[4787]: I0127 07:56:20.486002 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-kkjnl" event={"ID":"1e42281e-20ed-4105-866f-878ffbf6c6eb","Type":"ContainerDied","Data":"8cb3ae0fcd39b44a275d8cd5c91a52e737b682bd55f4da55edd1a06941315d84"} Jan 27 07:56:20 crc kubenswrapper[4787]: I0127 07:56:20.486438 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-kkjnl" event={"ID":"1e42281e-20ed-4105-866f-878ffbf6c6eb","Type":"ContainerDied","Data":"69a14500beb72f949103663a0b1a2f611ef846aca8d992b4feec769e4103a913"} Jan 27 07:56:20 crc kubenswrapper[4787]: I0127 07:56:20.508544 4787 scope.go:117] "RemoveContainer" containerID="ec5e3c96098a08d05b7b94cefe4b2f9c131e71b862786c006019c2dec6da5498" Jan 27 07:56:20 crc kubenswrapper[4787]: I0127 07:56:20.511586 4787 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7af45749-cd7f-490a-b2f6-bf979ea6467e-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 27 07:56:20 crc kubenswrapper[4787]: I0127 07:56:20.511617 4787 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1e42281e-20ed-4105-866f-878ffbf6c6eb-utilities\") on node \"crc\" DevicePath \"\"" Jan 27 07:56:20 crc kubenswrapper[4787]: I0127 07:56:20.511629 4787 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-76lt5\" (UniqueName: \"kubernetes.io/projected/1e42281e-20ed-4105-866f-878ffbf6c6eb-kube-api-access-76lt5\") on node \"crc\" DevicePath \"\"" Jan 27 07:56:20 crc kubenswrapper[4787]: I0127 07:56:20.511775 4787 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-h5d4g"] Jan 27 07:56:20 crc kubenswrapper[4787]: I0127 07:56:20.519802 4787 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-h5d4g"] Jan 27 07:56:20 crc kubenswrapper[4787]: I0127 07:56:20.527731 4787 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-ht6h9"] Jan 27 07:56:20 crc kubenswrapper[4787]: I0127 07:56:20.534352 4787 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-ht6h9"] Jan 27 07:56:20 crc kubenswrapper[4787]: I0127 07:56:20.539611 4787 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-tnxzl"] Jan 27 07:56:20 crc kubenswrapper[4787]: I0127 07:56:20.545826 4787 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-tnxzl"] Jan 27 07:56:20 crc kubenswrapper[4787]: I0127 07:56:20.546982 4787 scope.go:117] "RemoveContainer" containerID="16105c08221c0a2bad9341270bf79d1c85ac086baa9a0934ed9322b6ec8a995e" Jan 27 07:56:20 crc kubenswrapper[4787]: I0127 07:56:20.550460 4787 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-npc57"] Jan 27 07:56:20 crc kubenswrapper[4787]: I0127 07:56:20.553993 4787 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-npc57"] Jan 27 07:56:20 crc kubenswrapper[4787]: I0127 07:56:20.558356 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1e42281e-20ed-4105-866f-878ffbf6c6eb-catalog-content" (OuterVolumeSpecName: "catalog-content") pod 
"1e42281e-20ed-4105-866f-878ffbf6c6eb" (UID: "1e42281e-20ed-4105-866f-878ffbf6c6eb"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 27 07:56:20 crc kubenswrapper[4787]: I0127 07:56:20.562353 4787 scope.go:117] "RemoveContainer" containerID="c1c232a68764a7e2a2ab4410f7d9d45a3f03c3775264008e4c5ac89fab212864" Jan 27 07:56:20 crc kubenswrapper[4787]: E0127 07:56:20.563310 4787 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c1c232a68764a7e2a2ab4410f7d9d45a3f03c3775264008e4c5ac89fab212864\": container with ID starting with c1c232a68764a7e2a2ab4410f7d9d45a3f03c3775264008e4c5ac89fab212864 not found: ID does not exist" containerID="c1c232a68764a7e2a2ab4410f7d9d45a3f03c3775264008e4c5ac89fab212864" Jan 27 07:56:20 crc kubenswrapper[4787]: I0127 07:56:20.563367 4787 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c1c232a68764a7e2a2ab4410f7d9d45a3f03c3775264008e4c5ac89fab212864"} err="failed to get container status \"c1c232a68764a7e2a2ab4410f7d9d45a3f03c3775264008e4c5ac89fab212864\": rpc error: code = NotFound desc = could not find container \"c1c232a68764a7e2a2ab4410f7d9d45a3f03c3775264008e4c5ac89fab212864\": container with ID starting with c1c232a68764a7e2a2ab4410f7d9d45a3f03c3775264008e4c5ac89fab212864 not found: ID does not exist" Jan 27 07:56:20 crc kubenswrapper[4787]: I0127 07:56:20.563402 4787 scope.go:117] "RemoveContainer" containerID="ec5e3c96098a08d05b7b94cefe4b2f9c131e71b862786c006019c2dec6da5498" Jan 27 07:56:20 crc kubenswrapper[4787]: E0127 07:56:20.563913 4787 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ec5e3c96098a08d05b7b94cefe4b2f9c131e71b862786c006019c2dec6da5498\": container with ID starting with ec5e3c96098a08d05b7b94cefe4b2f9c131e71b862786c006019c2dec6da5498 not found: ID does not exist" containerID="ec5e3c96098a08d05b7b94cefe4b2f9c131e71b862786c006019c2dec6da5498" Jan 27 07:56:20 crc kubenswrapper[4787]: I0127 07:56:20.563956 4787 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ec5e3c96098a08d05b7b94cefe4b2f9c131e71b862786c006019c2dec6da5498"} err="failed to get container status \"ec5e3c96098a08d05b7b94cefe4b2f9c131e71b862786c006019c2dec6da5498\": rpc error: code = NotFound desc = could not find container \"ec5e3c96098a08d05b7b94cefe4b2f9c131e71b862786c006019c2dec6da5498\": container with ID starting with ec5e3c96098a08d05b7b94cefe4b2f9c131e71b862786c006019c2dec6da5498 not found: ID does not exist" Jan 27 07:56:20 crc kubenswrapper[4787]: I0127 07:56:20.563984 4787 scope.go:117] "RemoveContainer" containerID="16105c08221c0a2bad9341270bf79d1c85ac086baa9a0934ed9322b6ec8a995e" Jan 27 07:56:20 crc kubenswrapper[4787]: E0127 07:56:20.564333 4787 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"16105c08221c0a2bad9341270bf79d1c85ac086baa9a0934ed9322b6ec8a995e\": container with ID starting with 16105c08221c0a2bad9341270bf79d1c85ac086baa9a0934ed9322b6ec8a995e not found: ID does not exist" containerID="16105c08221c0a2bad9341270bf79d1c85ac086baa9a0934ed9322b6ec8a995e" Jan 27 07:56:20 crc kubenswrapper[4787]: I0127 07:56:20.564358 4787 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"16105c08221c0a2bad9341270bf79d1c85ac086baa9a0934ed9322b6ec8a995e"} err="failed to get 
container status \"16105c08221c0a2bad9341270bf79d1c85ac086baa9a0934ed9322b6ec8a995e\": rpc error: code = NotFound desc = could not find container \"16105c08221c0a2bad9341270bf79d1c85ac086baa9a0934ed9322b6ec8a995e\": container with ID starting with 16105c08221c0a2bad9341270bf79d1c85ac086baa9a0934ed9322b6ec8a995e not found: ID does not exist" Jan 27 07:56:20 crc kubenswrapper[4787]: I0127 07:56:20.564371 4787 scope.go:117] "RemoveContainer" containerID="e3cf97ca374aafda820a97034b2b6a3f6e20f34e82f747d6f2cfb26dfc59a223" Jan 27 07:56:20 crc kubenswrapper[4787]: I0127 07:56:20.577746 4787 scope.go:117] "RemoveContainer" containerID="0cc6e3133b3811c2501b6fd4d34b20a999454792da7dd131df5d09583da6565d" Jan 27 07:56:20 crc kubenswrapper[4787]: I0127 07:56:20.592790 4787 scope.go:117] "RemoveContainer" containerID="5ca52b27fd7e43c704f8e05cf0d0277c2c4f718ac46a1e5031c9baa689471895" Jan 27 07:56:20 crc kubenswrapper[4787]: I0127 07:56:20.606347 4787 scope.go:117] "RemoveContainer" containerID="e3cf97ca374aafda820a97034b2b6a3f6e20f34e82f747d6f2cfb26dfc59a223" Jan 27 07:56:20 crc kubenswrapper[4787]: E0127 07:56:20.606843 4787 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e3cf97ca374aafda820a97034b2b6a3f6e20f34e82f747d6f2cfb26dfc59a223\": container with ID starting with e3cf97ca374aafda820a97034b2b6a3f6e20f34e82f747d6f2cfb26dfc59a223 not found: ID does not exist" containerID="e3cf97ca374aafda820a97034b2b6a3f6e20f34e82f747d6f2cfb26dfc59a223" Jan 27 07:56:20 crc kubenswrapper[4787]: I0127 07:56:20.606873 4787 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e3cf97ca374aafda820a97034b2b6a3f6e20f34e82f747d6f2cfb26dfc59a223"} err="failed to get container status \"e3cf97ca374aafda820a97034b2b6a3f6e20f34e82f747d6f2cfb26dfc59a223\": rpc error: code = NotFound desc = could not find container \"e3cf97ca374aafda820a97034b2b6a3f6e20f34e82f747d6f2cfb26dfc59a223\": container with ID starting with e3cf97ca374aafda820a97034b2b6a3f6e20f34e82f747d6f2cfb26dfc59a223 not found: ID does not exist" Jan 27 07:56:20 crc kubenswrapper[4787]: I0127 07:56:20.606892 4787 scope.go:117] "RemoveContainer" containerID="0cc6e3133b3811c2501b6fd4d34b20a999454792da7dd131df5d09583da6565d" Jan 27 07:56:20 crc kubenswrapper[4787]: E0127 07:56:20.607338 4787 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0cc6e3133b3811c2501b6fd4d34b20a999454792da7dd131df5d09583da6565d\": container with ID starting with 0cc6e3133b3811c2501b6fd4d34b20a999454792da7dd131df5d09583da6565d not found: ID does not exist" containerID="0cc6e3133b3811c2501b6fd4d34b20a999454792da7dd131df5d09583da6565d" Jan 27 07:56:20 crc kubenswrapper[4787]: I0127 07:56:20.607418 4787 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0cc6e3133b3811c2501b6fd4d34b20a999454792da7dd131df5d09583da6565d"} err="failed to get container status \"0cc6e3133b3811c2501b6fd4d34b20a999454792da7dd131df5d09583da6565d\": rpc error: code = NotFound desc = could not find container \"0cc6e3133b3811c2501b6fd4d34b20a999454792da7dd131df5d09583da6565d\": container with ID starting with 0cc6e3133b3811c2501b6fd4d34b20a999454792da7dd131df5d09583da6565d not found: ID does not exist" Jan 27 07:56:20 crc kubenswrapper[4787]: I0127 07:56:20.607537 4787 scope.go:117] "RemoveContainer" containerID="5ca52b27fd7e43c704f8e05cf0d0277c2c4f718ac46a1e5031c9baa689471895" Jan 27 07:56:20 crc 
kubenswrapper[4787]: E0127 07:56:20.608359 4787 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5ca52b27fd7e43c704f8e05cf0d0277c2c4f718ac46a1e5031c9baa689471895\": container with ID starting with 5ca52b27fd7e43c704f8e05cf0d0277c2c4f718ac46a1e5031c9baa689471895 not found: ID does not exist" containerID="5ca52b27fd7e43c704f8e05cf0d0277c2c4f718ac46a1e5031c9baa689471895" Jan 27 07:56:20 crc kubenswrapper[4787]: I0127 07:56:20.608449 4787 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5ca52b27fd7e43c704f8e05cf0d0277c2c4f718ac46a1e5031c9baa689471895"} err="failed to get container status \"5ca52b27fd7e43c704f8e05cf0d0277c2c4f718ac46a1e5031c9baa689471895\": rpc error: code = NotFound desc = could not find container \"5ca52b27fd7e43c704f8e05cf0d0277c2c4f718ac46a1e5031c9baa689471895\": container with ID starting with 5ca52b27fd7e43c704f8e05cf0d0277c2c4f718ac46a1e5031c9baa689471895 not found: ID does not exist" Jan 27 07:56:20 crc kubenswrapper[4787]: I0127 07:56:20.608480 4787 scope.go:117] "RemoveContainer" containerID="3f0dd71506f96b5f1d29b34337fb17e1db8a01c5d142ebe84263d8b6c18e1d79" Jan 27 07:56:20 crc kubenswrapper[4787]: I0127 07:56:20.613078 4787 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1e42281e-20ed-4105-866f-878ffbf6c6eb-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 27 07:56:20 crc kubenswrapper[4787]: I0127 07:56:20.628707 4787 scope.go:117] "RemoveContainer" containerID="629cf5f25077b459468d115da73f8db540be3990ca6244c0ac21ff90a31aed2d" Jan 27 07:56:20 crc kubenswrapper[4787]: I0127 07:56:20.657382 4787 scope.go:117] "RemoveContainer" containerID="ae2a0db24a496532988d8c9c232f3f25c682b2d988bff4fb3dec2a81481a0733" Jan 27 07:56:20 crc kubenswrapper[4787]: I0127 07:56:20.680696 4787 scope.go:117] "RemoveContainer" containerID="3f0dd71506f96b5f1d29b34337fb17e1db8a01c5d142ebe84263d8b6c18e1d79" Jan 27 07:56:20 crc kubenswrapper[4787]: E0127 07:56:20.685076 4787 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3f0dd71506f96b5f1d29b34337fb17e1db8a01c5d142ebe84263d8b6c18e1d79\": container with ID starting with 3f0dd71506f96b5f1d29b34337fb17e1db8a01c5d142ebe84263d8b6c18e1d79 not found: ID does not exist" containerID="3f0dd71506f96b5f1d29b34337fb17e1db8a01c5d142ebe84263d8b6c18e1d79" Jan 27 07:56:20 crc kubenswrapper[4787]: I0127 07:56:20.685153 4787 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3f0dd71506f96b5f1d29b34337fb17e1db8a01c5d142ebe84263d8b6c18e1d79"} err="failed to get container status \"3f0dd71506f96b5f1d29b34337fb17e1db8a01c5d142ebe84263d8b6c18e1d79\": rpc error: code = NotFound desc = could not find container \"3f0dd71506f96b5f1d29b34337fb17e1db8a01c5d142ebe84263d8b6c18e1d79\": container with ID starting with 3f0dd71506f96b5f1d29b34337fb17e1db8a01c5d142ebe84263d8b6c18e1d79 not found: ID does not exist" Jan 27 07:56:20 crc kubenswrapper[4787]: I0127 07:56:20.685201 4787 scope.go:117] "RemoveContainer" containerID="629cf5f25077b459468d115da73f8db540be3990ca6244c0ac21ff90a31aed2d" Jan 27 07:56:20 crc kubenswrapper[4787]: E0127 07:56:20.685748 4787 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"629cf5f25077b459468d115da73f8db540be3990ca6244c0ac21ff90a31aed2d\": container with ID starting with 
629cf5f25077b459468d115da73f8db540be3990ca6244c0ac21ff90a31aed2d not found: ID does not exist" containerID="629cf5f25077b459468d115da73f8db540be3990ca6244c0ac21ff90a31aed2d" Jan 27 07:56:20 crc kubenswrapper[4787]: I0127 07:56:20.685799 4787 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"629cf5f25077b459468d115da73f8db540be3990ca6244c0ac21ff90a31aed2d"} err="failed to get container status \"629cf5f25077b459468d115da73f8db540be3990ca6244c0ac21ff90a31aed2d\": rpc error: code = NotFound desc = could not find container \"629cf5f25077b459468d115da73f8db540be3990ca6244c0ac21ff90a31aed2d\": container with ID starting with 629cf5f25077b459468d115da73f8db540be3990ca6244c0ac21ff90a31aed2d not found: ID does not exist" Jan 27 07:56:20 crc kubenswrapper[4787]: I0127 07:56:20.685825 4787 scope.go:117] "RemoveContainer" containerID="ae2a0db24a496532988d8c9c232f3f25c682b2d988bff4fb3dec2a81481a0733" Jan 27 07:56:20 crc kubenswrapper[4787]: E0127 07:56:20.686291 4787 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ae2a0db24a496532988d8c9c232f3f25c682b2d988bff4fb3dec2a81481a0733\": container with ID starting with ae2a0db24a496532988d8c9c232f3f25c682b2d988bff4fb3dec2a81481a0733 not found: ID does not exist" containerID="ae2a0db24a496532988d8c9c232f3f25c682b2d988bff4fb3dec2a81481a0733" Jan 27 07:56:20 crc kubenswrapper[4787]: I0127 07:56:20.686348 4787 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ae2a0db24a496532988d8c9c232f3f25c682b2d988bff4fb3dec2a81481a0733"} err="failed to get container status \"ae2a0db24a496532988d8c9c232f3f25c682b2d988bff4fb3dec2a81481a0733\": rpc error: code = NotFound desc = could not find container \"ae2a0db24a496532988d8c9c232f3f25c682b2d988bff4fb3dec2a81481a0733\": container with ID starting with ae2a0db24a496532988d8c9c232f3f25c682b2d988bff4fb3dec2a81481a0733 not found: ID does not exist" Jan 27 07:56:20 crc kubenswrapper[4787]: I0127 07:56:20.686392 4787 scope.go:117] "RemoveContainer" containerID="8cb3ae0fcd39b44a275d8cd5c91a52e737b682bd55f4da55edd1a06941315d84" Jan 27 07:56:20 crc kubenswrapper[4787]: I0127 07:56:20.688173 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-fkd29"] Jan 27 07:56:20 crc kubenswrapper[4787]: I0127 07:56:20.693709 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"etcd-serving-ca" Jan 27 07:56:20 crc kubenswrapper[4787]: W0127 07:56:20.696947 4787 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod39675fef_cac4_48c9_bd77_e0ee695a5ab8.slice/crio-eca5e6848d5a7c0627f7e7c3d5db9faa8e9c4bb27962c92bfa01ff60756ecf24 WatchSource:0}: Error finding container eca5e6848d5a7c0627f7e7c3d5db9faa8e9c4bb27962c92bfa01ff60756ecf24: Status 404 returned error can't find the container with id eca5e6848d5a7c0627f7e7c3d5db9faa8e9c4bb27962c92bfa01ff60756ecf24 Jan 27 07:56:20 crc kubenswrapper[4787]: I0127 07:56:20.716534 4787 scope.go:117] "RemoveContainer" containerID="aba819bb87de56fd8e57aa84a316cf4c3386292e3c4ce220f195f806f8261dfd" Jan 27 07:56:20 crc kubenswrapper[4787]: I0127 07:56:20.745008 4787 scope.go:117] "RemoveContainer" containerID="71a0fd62f051e1733036586c55bfd4c513668f6b2af829fed34f76f2fd16d4a2" Jan 27 07:56:20 crc kubenswrapper[4787]: I0127 07:56:20.828436 4787 scope.go:117] "RemoveContainer" 
containerID="8cb3ae0fcd39b44a275d8cd5c91a52e737b682bd55f4da55edd1a06941315d84" Jan 27 07:56:20 crc kubenswrapper[4787]: E0127 07:56:20.829139 4787 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8cb3ae0fcd39b44a275d8cd5c91a52e737b682bd55f4da55edd1a06941315d84\": container with ID starting with 8cb3ae0fcd39b44a275d8cd5c91a52e737b682bd55f4da55edd1a06941315d84 not found: ID does not exist" containerID="8cb3ae0fcd39b44a275d8cd5c91a52e737b682bd55f4da55edd1a06941315d84" Jan 27 07:56:20 crc kubenswrapper[4787]: I0127 07:56:20.829212 4787 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8cb3ae0fcd39b44a275d8cd5c91a52e737b682bd55f4da55edd1a06941315d84"} err="failed to get container status \"8cb3ae0fcd39b44a275d8cd5c91a52e737b682bd55f4da55edd1a06941315d84\": rpc error: code = NotFound desc = could not find container \"8cb3ae0fcd39b44a275d8cd5c91a52e737b682bd55f4da55edd1a06941315d84\": container with ID starting with 8cb3ae0fcd39b44a275d8cd5c91a52e737b682bd55f4da55edd1a06941315d84 not found: ID does not exist" Jan 27 07:56:20 crc kubenswrapper[4787]: I0127 07:56:20.829256 4787 scope.go:117] "RemoveContainer" containerID="aba819bb87de56fd8e57aa84a316cf4c3386292e3c4ce220f195f806f8261dfd" Jan 27 07:56:20 crc kubenswrapper[4787]: E0127 07:56:20.829908 4787 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"aba819bb87de56fd8e57aa84a316cf4c3386292e3c4ce220f195f806f8261dfd\": container with ID starting with aba819bb87de56fd8e57aa84a316cf4c3386292e3c4ce220f195f806f8261dfd not found: ID does not exist" containerID="aba819bb87de56fd8e57aa84a316cf4c3386292e3c4ce220f195f806f8261dfd" Jan 27 07:56:20 crc kubenswrapper[4787]: I0127 07:56:20.830002 4787 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"aba819bb87de56fd8e57aa84a316cf4c3386292e3c4ce220f195f806f8261dfd"} err="failed to get container status \"aba819bb87de56fd8e57aa84a316cf4c3386292e3c4ce220f195f806f8261dfd\": rpc error: code = NotFound desc = could not find container \"aba819bb87de56fd8e57aa84a316cf4c3386292e3c4ce220f195f806f8261dfd\": container with ID starting with aba819bb87de56fd8e57aa84a316cf4c3386292e3c4ce220f195f806f8261dfd not found: ID does not exist" Jan 27 07:56:20 crc kubenswrapper[4787]: I0127 07:56:20.830046 4787 scope.go:117] "RemoveContainer" containerID="71a0fd62f051e1733036586c55bfd4c513668f6b2af829fed34f76f2fd16d4a2" Jan 27 07:56:20 crc kubenswrapper[4787]: I0127 07:56:20.830406 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"kube-root-ca.crt" Jan 27 07:56:20 crc kubenswrapper[4787]: E0127 07:56:20.830693 4787 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"71a0fd62f051e1733036586c55bfd4c513668f6b2af829fed34f76f2fd16d4a2\": container with ID starting with 71a0fd62f051e1733036586c55bfd4c513668f6b2af829fed34f76f2fd16d4a2 not found: ID does not exist" containerID="71a0fd62f051e1733036586c55bfd4c513668f6b2af829fed34f76f2fd16d4a2" Jan 27 07:56:20 crc kubenswrapper[4787]: I0127 07:56:20.830727 4787 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"71a0fd62f051e1733036586c55bfd4c513668f6b2af829fed34f76f2fd16d4a2"} err="failed to get container status \"71a0fd62f051e1733036586c55bfd4c513668f6b2af829fed34f76f2fd16d4a2\": rpc error: 
code = NotFound desc = could not find container \"71a0fd62f051e1733036586c55bfd4c513668f6b2af829fed34f76f2fd16d4a2\": container with ID starting with 71a0fd62f051e1733036586c55bfd4c513668f6b2af829fed34f76f2fd16d4a2 not found: ID does not exist" Jan 27 07:56:20 crc kubenswrapper[4787]: I0127 07:56:20.900111 4787 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-kkjnl"] Jan 27 07:56:20 crc kubenswrapper[4787]: I0127 07:56:20.903338 4787 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-kkjnl"] Jan 27 07:56:20 crc kubenswrapper[4787]: I0127 07:56:20.962716 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-node-identity"/"network-node-identity-cert" Jan 27 07:56:21 crc kubenswrapper[4787]: I0127 07:56:21.089530 4787 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1e42281e-20ed-4105-866f-878ffbf6c6eb" path="/var/lib/kubelet/pods/1e42281e-20ed-4105-866f-878ffbf6c6eb/volumes" Jan 27 07:56:21 crc kubenswrapper[4787]: I0127 07:56:21.091386 4787 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2e88d48e-6302-4a84-90a7-69446be90e4a" path="/var/lib/kubelet/pods/2e88d48e-6302-4a84-90a7-69446be90e4a/volumes" Jan 27 07:56:21 crc kubenswrapper[4787]: I0127 07:56:21.092270 4787 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="44cc5af2-0636-4589-a988-e7e32bfea075" path="/var/lib/kubelet/pods/44cc5af2-0636-4589-a988-e7e32bfea075/volumes" Jan 27 07:56:21 crc kubenswrapper[4787]: I0127 07:56:21.093914 4787 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="624e9a13-c9c5-4ef3-8628-056bfc65338b" path="/var/lib/kubelet/pods/624e9a13-c9c5-4ef3-8628-056bfc65338b/volumes" Jan 27 07:56:21 crc kubenswrapper[4787]: I0127 07:56:21.094836 4787 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7af45749-cd7f-490a-b2f6-bf979ea6467e" path="/var/lib/kubelet/pods/7af45749-cd7f-490a-b2f6-bf979ea6467e/volumes" Jan 27 07:56:21 crc kubenswrapper[4787]: I0127 07:56:21.186288 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serviceaccount-dockercfg-rq7zk" Jan 27 07:56:21 crc kubenswrapper[4787]: I0127 07:56:21.270793 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Jan 27 07:56:21 crc kubenswrapper[4787]: I0127 07:56:21.290935 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"audit-1" Jan 27 07:56:21 crc kubenswrapper[4787]: I0127 07:56:21.343568 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"trusted-ca" Jan 27 07:56:21 crc kubenswrapper[4787]: I0127 07:56:21.498864 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-fkd29" event={"ID":"39675fef-cac4-48c9-bd77-e0ee695a5ab8","Type":"ContainerStarted","Data":"57f56124a0e8fcf97a1c8bde7c6849d7575aae5bfa455464c971f808ebdc6c3b"} Jan 27 07:56:21 crc kubenswrapper[4787]: I0127 07:56:21.498941 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-fkd29" event={"ID":"39675fef-cac4-48c9-bd77-e0ee695a5ab8","Type":"ContainerStarted","Data":"eca5e6848d5a7c0627f7e7c3d5db9faa8e9c4bb27962c92bfa01ff60756ecf24"} Jan 27 07:56:21 crc kubenswrapper[4787]: I0127 07:56:21.499194 4787 kubelet.go:2542] "SyncLoop (probe)" 
probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-fkd29" Jan 27 07:56:21 crc kubenswrapper[4787]: I0127 07:56:21.503328 4787 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-fkd29" Jan 27 07:56:21 crc kubenswrapper[4787]: I0127 07:56:21.517776 4787 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-fkd29" podStartSLOduration=2.517751002 podStartE2EDuration="2.517751002s" podCreationTimestamp="2026-01-27 07:56:19 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-27 07:56:21.51690414 +0000 UTC m=+287.169259672" watchObservedRunningTime="2026-01-27 07:56:21.517751002 +0000 UTC m=+287.170106504" Jan 27 07:56:21 crc kubenswrapper[4787]: I0127 07:56:21.600108 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection" Jan 27 07:56:21 crc kubenswrapper[4787]: I0127 07:56:21.630827 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"machine-approver-config" Jan 27 07:56:21 crc kubenswrapper[4787]: I0127 07:56:21.933434 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-dockercfg-f62pw" Jan 27 07:56:22 crc kubenswrapper[4787]: I0127 07:56:22.000359 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"dns-default" Jan 27 07:56:22 crc kubenswrapper[4787]: I0127 07:56:22.224294 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"default-dockercfg-2q5b6" Jan 27 07:56:33 crc kubenswrapper[4787]: I0127 07:56:33.863959 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-dockercfg-gkqpw" Jan 27 07:56:34 crc kubenswrapper[4787]: I0127 07:56:34.858681 4787 cert_rotation.go:91] certificate rotation detected, shutting down client connections to start using new credentials Jan 27 07:56:38 crc kubenswrapper[4787]: I0127 07:56:38.075438 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"openshift-service-ca.crt" Jan 27 07:56:46 crc kubenswrapper[4787]: I0127 07:56:46.626608 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-serving-cert" Jan 27 07:56:54 crc kubenswrapper[4787]: I0127 07:56:54.253544 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"trusted-ca-bundle" Jan 27 07:56:56 crc kubenswrapper[4787]: I0127 07:56:56.540209 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-serving-cert" Jan 27 07:56:58 crc kubenswrapper[4787]: I0127 07:56:58.230284 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc" Jan 27 07:57:03 crc kubenswrapper[4787]: I0127 07:57:03.196018 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"packageserver-service-cert" Jan 27 07:57:32 crc kubenswrapper[4787]: I0127 07:57:32.873923 4787 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-gjx52"] Jan 27 07:57:32 crc kubenswrapper[4787]: E0127 07:57:32.874915 4787 
cpu_manager.go:410] "RemoveStaleState: removing container" podUID="44cc5af2-0636-4589-a988-e7e32bfea075" containerName="registry-server" Jan 27 07:57:32 crc kubenswrapper[4787]: I0127 07:57:32.874939 4787 state_mem.go:107] "Deleted CPUSet assignment" podUID="44cc5af2-0636-4589-a988-e7e32bfea075" containerName="registry-server" Jan 27 07:57:32 crc kubenswrapper[4787]: E0127 07:57:32.874963 4787 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="624e9a13-c9c5-4ef3-8628-056bfc65338b" containerName="extract-utilities" Jan 27 07:57:32 crc kubenswrapper[4787]: I0127 07:57:32.874977 4787 state_mem.go:107] "Deleted CPUSet assignment" podUID="624e9a13-c9c5-4ef3-8628-056bfc65338b" containerName="extract-utilities" Jan 27 07:57:32 crc kubenswrapper[4787]: E0127 07:57:32.874994 4787 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="624e9a13-c9c5-4ef3-8628-056bfc65338b" containerName="registry-server" Jan 27 07:57:32 crc kubenswrapper[4787]: I0127 07:57:32.875006 4787 state_mem.go:107] "Deleted CPUSet assignment" podUID="624e9a13-c9c5-4ef3-8628-056bfc65338b" containerName="registry-server" Jan 27 07:57:32 crc kubenswrapper[4787]: E0127 07:57:32.875020 4787 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2e88d48e-6302-4a84-90a7-69446be90e4a" containerName="marketplace-operator" Jan 27 07:57:32 crc kubenswrapper[4787]: I0127 07:57:32.875028 4787 state_mem.go:107] "Deleted CPUSet assignment" podUID="2e88d48e-6302-4a84-90a7-69446be90e4a" containerName="marketplace-operator" Jan 27 07:57:32 crc kubenswrapper[4787]: E0127 07:57:32.875038 4787 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="44cc5af2-0636-4589-a988-e7e32bfea075" containerName="extract-content" Jan 27 07:57:32 crc kubenswrapper[4787]: I0127 07:57:32.875046 4787 state_mem.go:107] "Deleted CPUSet assignment" podUID="44cc5af2-0636-4589-a988-e7e32bfea075" containerName="extract-content" Jan 27 07:57:32 crc kubenswrapper[4787]: E0127 07:57:32.875055 4787 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7af45749-cd7f-490a-b2f6-bf979ea6467e" containerName="registry-server" Jan 27 07:57:32 crc kubenswrapper[4787]: I0127 07:57:32.875063 4787 state_mem.go:107] "Deleted CPUSet assignment" podUID="7af45749-cd7f-490a-b2f6-bf979ea6467e" containerName="registry-server" Jan 27 07:57:32 crc kubenswrapper[4787]: E0127 07:57:32.875075 4787 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1e42281e-20ed-4105-866f-878ffbf6c6eb" containerName="registry-server" Jan 27 07:57:32 crc kubenswrapper[4787]: I0127 07:57:32.875084 4787 state_mem.go:107] "Deleted CPUSet assignment" podUID="1e42281e-20ed-4105-866f-878ffbf6c6eb" containerName="registry-server" Jan 27 07:57:32 crc kubenswrapper[4787]: E0127 07:57:32.875095 4787 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7af45749-cd7f-490a-b2f6-bf979ea6467e" containerName="extract-content" Jan 27 07:57:32 crc kubenswrapper[4787]: I0127 07:57:32.875103 4787 state_mem.go:107] "Deleted CPUSet assignment" podUID="7af45749-cd7f-490a-b2f6-bf979ea6467e" containerName="extract-content" Jan 27 07:57:32 crc kubenswrapper[4787]: E0127 07:57:32.875116 4787 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7af45749-cd7f-490a-b2f6-bf979ea6467e" containerName="extract-utilities" Jan 27 07:57:32 crc kubenswrapper[4787]: I0127 07:57:32.875125 4787 state_mem.go:107] "Deleted CPUSet assignment" podUID="7af45749-cd7f-490a-b2f6-bf979ea6467e" containerName="extract-utilities" Jan 27 07:57:32 crc 
kubenswrapper[4787]: E0127 07:57:32.875137 4787 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="624e9a13-c9c5-4ef3-8628-056bfc65338b" containerName="extract-content" Jan 27 07:57:32 crc kubenswrapper[4787]: I0127 07:57:32.875145 4787 state_mem.go:107] "Deleted CPUSet assignment" podUID="624e9a13-c9c5-4ef3-8628-056bfc65338b" containerName="extract-content" Jan 27 07:57:32 crc kubenswrapper[4787]: E0127 07:57:32.875157 4787 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="44cc5af2-0636-4589-a988-e7e32bfea075" containerName="extract-utilities" Jan 27 07:57:32 crc kubenswrapper[4787]: I0127 07:57:32.875165 4787 state_mem.go:107] "Deleted CPUSet assignment" podUID="44cc5af2-0636-4589-a988-e7e32bfea075" containerName="extract-utilities" Jan 27 07:57:32 crc kubenswrapper[4787]: E0127 07:57:32.875179 4787 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1e42281e-20ed-4105-866f-878ffbf6c6eb" containerName="extract-utilities" Jan 27 07:57:32 crc kubenswrapper[4787]: I0127 07:57:32.875188 4787 state_mem.go:107] "Deleted CPUSet assignment" podUID="1e42281e-20ed-4105-866f-878ffbf6c6eb" containerName="extract-utilities" Jan 27 07:57:32 crc kubenswrapper[4787]: E0127 07:57:32.875205 4787 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1e42281e-20ed-4105-866f-878ffbf6c6eb" containerName="extract-content" Jan 27 07:57:32 crc kubenswrapper[4787]: I0127 07:57:32.875213 4787 state_mem.go:107] "Deleted CPUSet assignment" podUID="1e42281e-20ed-4105-866f-878ffbf6c6eb" containerName="extract-content" Jan 27 07:57:32 crc kubenswrapper[4787]: I0127 07:57:32.875338 4787 memory_manager.go:354] "RemoveStaleState removing state" podUID="624e9a13-c9c5-4ef3-8628-056bfc65338b" containerName="registry-server" Jan 27 07:57:32 crc kubenswrapper[4787]: I0127 07:57:32.875354 4787 memory_manager.go:354] "RemoveStaleState removing state" podUID="44cc5af2-0636-4589-a988-e7e32bfea075" containerName="registry-server" Jan 27 07:57:32 crc kubenswrapper[4787]: I0127 07:57:32.875367 4787 memory_manager.go:354] "RemoveStaleState removing state" podUID="2e88d48e-6302-4a84-90a7-69446be90e4a" containerName="marketplace-operator" Jan 27 07:57:32 crc kubenswrapper[4787]: I0127 07:57:32.875381 4787 memory_manager.go:354] "RemoveStaleState removing state" podUID="7af45749-cd7f-490a-b2f6-bf979ea6467e" containerName="registry-server" Jan 27 07:57:32 crc kubenswrapper[4787]: I0127 07:57:32.875391 4787 memory_manager.go:354] "RemoveStaleState removing state" podUID="1e42281e-20ed-4105-866f-878ffbf6c6eb" containerName="registry-server" Jan 27 07:57:32 crc kubenswrapper[4787]: I0127 07:57:32.876416 4787 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-gjx52" Jan 27 07:57:32 crc kubenswrapper[4787]: I0127 07:57:32.879945 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Jan 27 07:57:32 crc kubenswrapper[4787]: I0127 07:57:32.887629 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-gjx52"] Jan 27 07:57:32 crc kubenswrapper[4787]: I0127 07:57:32.983296 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-htqgr\" (UniqueName: \"kubernetes.io/projected/6953906d-8a35-4d3e-83a3-3a7451e834cc-kube-api-access-htqgr\") pod \"community-operators-gjx52\" (UID: \"6953906d-8a35-4d3e-83a3-3a7451e834cc\") " pod="openshift-marketplace/community-operators-gjx52" Jan 27 07:57:32 crc kubenswrapper[4787]: I0127 07:57:32.983374 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6953906d-8a35-4d3e-83a3-3a7451e834cc-catalog-content\") pod \"community-operators-gjx52\" (UID: \"6953906d-8a35-4d3e-83a3-3a7451e834cc\") " pod="openshift-marketplace/community-operators-gjx52" Jan 27 07:57:32 crc kubenswrapper[4787]: I0127 07:57:32.983515 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6953906d-8a35-4d3e-83a3-3a7451e834cc-utilities\") pod \"community-operators-gjx52\" (UID: \"6953906d-8a35-4d3e-83a3-3a7451e834cc\") " pod="openshift-marketplace/community-operators-gjx52" Jan 27 07:57:33 crc kubenswrapper[4787]: I0127 07:57:33.070711 4787 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-rdpsb"] Jan 27 07:57:33 crc kubenswrapper[4787]: I0127 07:57:33.071871 4787 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-rdpsb" Jan 27 07:57:33 crc kubenswrapper[4787]: I0127 07:57:33.075176 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Jan 27 07:57:33 crc kubenswrapper[4787]: I0127 07:57:33.084914 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6953906d-8a35-4d3e-83a3-3a7451e834cc-utilities\") pod \"community-operators-gjx52\" (UID: \"6953906d-8a35-4d3e-83a3-3a7451e834cc\") " pod="openshift-marketplace/community-operators-gjx52" Jan 27 07:57:33 crc kubenswrapper[4787]: I0127 07:57:33.085412 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6953906d-8a35-4d3e-83a3-3a7451e834cc-utilities\") pod \"community-operators-gjx52\" (UID: \"6953906d-8a35-4d3e-83a3-3a7451e834cc\") " pod="openshift-marketplace/community-operators-gjx52" Jan 27 07:57:33 crc kubenswrapper[4787]: I0127 07:57:33.085535 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-htqgr\" (UniqueName: \"kubernetes.io/projected/6953906d-8a35-4d3e-83a3-3a7451e834cc-kube-api-access-htqgr\") pod \"community-operators-gjx52\" (UID: \"6953906d-8a35-4d3e-83a3-3a7451e834cc\") " pod="openshift-marketplace/community-operators-gjx52" Jan 27 07:57:33 crc kubenswrapper[4787]: I0127 07:57:33.085593 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6953906d-8a35-4d3e-83a3-3a7451e834cc-catalog-content\") pod \"community-operators-gjx52\" (UID: \"6953906d-8a35-4d3e-83a3-3a7451e834cc\") " pod="openshift-marketplace/community-operators-gjx52" Jan 27 07:57:33 crc kubenswrapper[4787]: I0127 07:57:33.087849 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6953906d-8a35-4d3e-83a3-3a7451e834cc-catalog-content\") pod \"community-operators-gjx52\" (UID: \"6953906d-8a35-4d3e-83a3-3a7451e834cc\") " pod="openshift-marketplace/community-operators-gjx52" Jan 27 07:57:33 crc kubenswrapper[4787]: I0127 07:57:33.106344 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-rdpsb"] Jan 27 07:57:33 crc kubenswrapper[4787]: I0127 07:57:33.123792 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-htqgr\" (UniqueName: \"kubernetes.io/projected/6953906d-8a35-4d3e-83a3-3a7451e834cc-kube-api-access-htqgr\") pod \"community-operators-gjx52\" (UID: \"6953906d-8a35-4d3e-83a3-3a7451e834cc\") " pod="openshift-marketplace/community-operators-gjx52" Jan 27 07:57:33 crc kubenswrapper[4787]: I0127 07:57:33.189384 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b6daca56-9e67-41d1-80da-c213717daead-utilities\") pod \"redhat-marketplace-rdpsb\" (UID: \"b6daca56-9e67-41d1-80da-c213717daead\") " pod="openshift-marketplace/redhat-marketplace-rdpsb" Jan 27 07:57:33 crc kubenswrapper[4787]: I0127 07:57:33.189751 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b6daca56-9e67-41d1-80da-c213717daead-catalog-content\") pod \"redhat-marketplace-rdpsb\" (UID: 
\"b6daca56-9e67-41d1-80da-c213717daead\") " pod="openshift-marketplace/redhat-marketplace-rdpsb" Jan 27 07:57:33 crc kubenswrapper[4787]: I0127 07:57:33.189874 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6rj2l\" (UniqueName: \"kubernetes.io/projected/b6daca56-9e67-41d1-80da-c213717daead-kube-api-access-6rj2l\") pod \"redhat-marketplace-rdpsb\" (UID: \"b6daca56-9e67-41d1-80da-c213717daead\") " pod="openshift-marketplace/redhat-marketplace-rdpsb" Jan 27 07:57:33 crc kubenswrapper[4787]: I0127 07:57:33.199571 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-gjx52" Jan 27 07:57:33 crc kubenswrapper[4787]: I0127 07:57:33.291828 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b6daca56-9e67-41d1-80da-c213717daead-utilities\") pod \"redhat-marketplace-rdpsb\" (UID: \"b6daca56-9e67-41d1-80da-c213717daead\") " pod="openshift-marketplace/redhat-marketplace-rdpsb" Jan 27 07:57:33 crc kubenswrapper[4787]: I0127 07:57:33.291922 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b6daca56-9e67-41d1-80da-c213717daead-catalog-content\") pod \"redhat-marketplace-rdpsb\" (UID: \"b6daca56-9e67-41d1-80da-c213717daead\") " pod="openshift-marketplace/redhat-marketplace-rdpsb" Jan 27 07:57:33 crc kubenswrapper[4787]: I0127 07:57:33.291960 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6rj2l\" (UniqueName: \"kubernetes.io/projected/b6daca56-9e67-41d1-80da-c213717daead-kube-api-access-6rj2l\") pod \"redhat-marketplace-rdpsb\" (UID: \"b6daca56-9e67-41d1-80da-c213717daead\") " pod="openshift-marketplace/redhat-marketplace-rdpsb" Jan 27 07:57:33 crc kubenswrapper[4787]: I0127 07:57:33.292485 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b6daca56-9e67-41d1-80da-c213717daead-catalog-content\") pod \"redhat-marketplace-rdpsb\" (UID: \"b6daca56-9e67-41d1-80da-c213717daead\") " pod="openshift-marketplace/redhat-marketplace-rdpsb" Jan 27 07:57:33 crc kubenswrapper[4787]: I0127 07:57:33.292754 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b6daca56-9e67-41d1-80da-c213717daead-utilities\") pod \"redhat-marketplace-rdpsb\" (UID: \"b6daca56-9e67-41d1-80da-c213717daead\") " pod="openshift-marketplace/redhat-marketplace-rdpsb" Jan 27 07:57:33 crc kubenswrapper[4787]: I0127 07:57:33.320828 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6rj2l\" (UniqueName: \"kubernetes.io/projected/b6daca56-9e67-41d1-80da-c213717daead-kube-api-access-6rj2l\") pod \"redhat-marketplace-rdpsb\" (UID: \"b6daca56-9e67-41d1-80da-c213717daead\") " pod="openshift-marketplace/redhat-marketplace-rdpsb" Jan 27 07:57:33 crc kubenswrapper[4787]: I0127 07:57:33.392214 4787 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-rdpsb" Jan 27 07:57:33 crc kubenswrapper[4787]: I0127 07:57:33.639329 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-gjx52"] Jan 27 07:57:33 crc kubenswrapper[4787]: I0127 07:57:33.800812 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-rdpsb"] Jan 27 07:57:33 crc kubenswrapper[4787]: W0127 07:57:33.802934 4787 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb6daca56_9e67_41d1_80da_c213717daead.slice/crio-17ce87677e9bd3fe72210433712c6e413796e79c3f921bc7cf6cb6fca1581d2f WatchSource:0}: Error finding container 17ce87677e9bd3fe72210433712c6e413796e79c3f921bc7cf6cb6fca1581d2f: Status 404 returned error can't find the container with id 17ce87677e9bd3fe72210433712c6e413796e79c3f921bc7cf6cb6fca1581d2f Jan 27 07:57:33 crc kubenswrapper[4787]: I0127 07:57:33.950156 4787 generic.go:334] "Generic (PLEG): container finished" podID="6953906d-8a35-4d3e-83a3-3a7451e834cc" containerID="3590d6b4bb06bc944ba0e13d5162ca1a3baa9bd382eb5d6bae622b4765fc0e6e" exitCode=0 Jan 27 07:57:33 crc kubenswrapper[4787]: I0127 07:57:33.950631 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-gjx52" event={"ID":"6953906d-8a35-4d3e-83a3-3a7451e834cc","Type":"ContainerDied","Data":"3590d6b4bb06bc944ba0e13d5162ca1a3baa9bd382eb5d6bae622b4765fc0e6e"} Jan 27 07:57:33 crc kubenswrapper[4787]: I0127 07:57:33.950787 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-gjx52" event={"ID":"6953906d-8a35-4d3e-83a3-3a7451e834cc","Type":"ContainerStarted","Data":"2d86cdc40f197a96e1ca231fa9c6b0844138a5f041e3afec28314ca6d04f9d50"} Jan 27 07:57:33 crc kubenswrapper[4787]: I0127 07:57:33.959368 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-rdpsb" event={"ID":"b6daca56-9e67-41d1-80da-c213717daead","Type":"ContainerStarted","Data":"77ec79c6badfdeac1d0feceb0847c6998ccb5e4ff37bceb496a918544cd170b9"} Jan 27 07:57:33 crc kubenswrapper[4787]: I0127 07:57:33.959425 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-rdpsb" event={"ID":"b6daca56-9e67-41d1-80da-c213717daead","Type":"ContainerStarted","Data":"17ce87677e9bd3fe72210433712c6e413796e79c3f921bc7cf6cb6fca1581d2f"} Jan 27 07:57:34 crc kubenswrapper[4787]: I0127 07:57:34.966415 4787 generic.go:334] "Generic (PLEG): container finished" podID="b6daca56-9e67-41d1-80da-c213717daead" containerID="77ec79c6badfdeac1d0feceb0847c6998ccb5e4ff37bceb496a918544cd170b9" exitCode=0 Jan 27 07:57:34 crc kubenswrapper[4787]: I0127 07:57:34.966466 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-rdpsb" event={"ID":"b6daca56-9e67-41d1-80da-c213717daead","Type":"ContainerDied","Data":"77ec79c6badfdeac1d0feceb0847c6998ccb5e4ff37bceb496a918544cd170b9"} Jan 27 07:57:35 crc kubenswrapper[4787]: I0127 07:57:35.268271 4787 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-8bz6z"] Jan 27 07:57:35 crc kubenswrapper[4787]: I0127 07:57:35.276022 4787 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-8bz6z" Jan 27 07:57:35 crc kubenswrapper[4787]: I0127 07:57:35.276261 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-8bz6z"] Jan 27 07:57:35 crc kubenswrapper[4787]: I0127 07:57:35.278857 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Jan 27 07:57:35 crc kubenswrapper[4787]: I0127 07:57:35.422302 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3c2c8464-ea72-46ca-a23d-ae23e6617ce8-catalog-content\") pod \"redhat-operators-8bz6z\" (UID: \"3c2c8464-ea72-46ca-a23d-ae23e6617ce8\") " pod="openshift-marketplace/redhat-operators-8bz6z" Jan 27 07:57:35 crc kubenswrapper[4787]: I0127 07:57:35.422401 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3c2c8464-ea72-46ca-a23d-ae23e6617ce8-utilities\") pod \"redhat-operators-8bz6z\" (UID: \"3c2c8464-ea72-46ca-a23d-ae23e6617ce8\") " pod="openshift-marketplace/redhat-operators-8bz6z" Jan 27 07:57:35 crc kubenswrapper[4787]: I0127 07:57:35.422437 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vrfqx\" (UniqueName: \"kubernetes.io/projected/3c2c8464-ea72-46ca-a23d-ae23e6617ce8-kube-api-access-vrfqx\") pod \"redhat-operators-8bz6z\" (UID: \"3c2c8464-ea72-46ca-a23d-ae23e6617ce8\") " pod="openshift-marketplace/redhat-operators-8bz6z" Jan 27 07:57:35 crc kubenswrapper[4787]: I0127 07:57:35.475739 4787 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-bjssk"] Jan 27 07:57:35 crc kubenswrapper[4787]: I0127 07:57:35.479188 4787 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-bjssk" Jan 27 07:57:35 crc kubenswrapper[4787]: I0127 07:57:35.484323 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-bjssk"] Jan 27 07:57:35 crc kubenswrapper[4787]: I0127 07:57:35.485237 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Jan 27 07:57:35 crc kubenswrapper[4787]: I0127 07:57:35.524237 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3c2c8464-ea72-46ca-a23d-ae23e6617ce8-catalog-content\") pod \"redhat-operators-8bz6z\" (UID: \"3c2c8464-ea72-46ca-a23d-ae23e6617ce8\") " pod="openshift-marketplace/redhat-operators-8bz6z" Jan 27 07:57:35 crc kubenswrapper[4787]: I0127 07:57:35.524409 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3c2c8464-ea72-46ca-a23d-ae23e6617ce8-utilities\") pod \"redhat-operators-8bz6z\" (UID: \"3c2c8464-ea72-46ca-a23d-ae23e6617ce8\") " pod="openshift-marketplace/redhat-operators-8bz6z" Jan 27 07:57:35 crc kubenswrapper[4787]: I0127 07:57:35.524459 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vrfqx\" (UniqueName: \"kubernetes.io/projected/3c2c8464-ea72-46ca-a23d-ae23e6617ce8-kube-api-access-vrfqx\") pod \"redhat-operators-8bz6z\" (UID: \"3c2c8464-ea72-46ca-a23d-ae23e6617ce8\") " pod="openshift-marketplace/redhat-operators-8bz6z" Jan 27 07:57:35 crc kubenswrapper[4787]: I0127 07:57:35.525569 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3c2c8464-ea72-46ca-a23d-ae23e6617ce8-catalog-content\") pod \"redhat-operators-8bz6z\" (UID: \"3c2c8464-ea72-46ca-a23d-ae23e6617ce8\") " pod="openshift-marketplace/redhat-operators-8bz6z" Jan 27 07:57:35 crc kubenswrapper[4787]: I0127 07:57:35.526218 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3c2c8464-ea72-46ca-a23d-ae23e6617ce8-utilities\") pod \"redhat-operators-8bz6z\" (UID: \"3c2c8464-ea72-46ca-a23d-ae23e6617ce8\") " pod="openshift-marketplace/redhat-operators-8bz6z" Jan 27 07:57:35 crc kubenswrapper[4787]: I0127 07:57:35.549618 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vrfqx\" (UniqueName: \"kubernetes.io/projected/3c2c8464-ea72-46ca-a23d-ae23e6617ce8-kube-api-access-vrfqx\") pod \"redhat-operators-8bz6z\" (UID: \"3c2c8464-ea72-46ca-a23d-ae23e6617ce8\") " pod="openshift-marketplace/redhat-operators-8bz6z" Jan 27 07:57:35 crc kubenswrapper[4787]: I0127 07:57:35.616361 4787 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-8bz6z" Jan 27 07:57:35 crc kubenswrapper[4787]: I0127 07:57:35.628589 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/56eb4b72-a8dc-4300-882e-e29d83442af5-catalog-content\") pod \"certified-operators-bjssk\" (UID: \"56eb4b72-a8dc-4300-882e-e29d83442af5\") " pod="openshift-marketplace/certified-operators-bjssk" Jan 27 07:57:35 crc kubenswrapper[4787]: I0127 07:57:35.628665 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9xn5x\" (UniqueName: \"kubernetes.io/projected/56eb4b72-a8dc-4300-882e-e29d83442af5-kube-api-access-9xn5x\") pod \"certified-operators-bjssk\" (UID: \"56eb4b72-a8dc-4300-882e-e29d83442af5\") " pod="openshift-marketplace/certified-operators-bjssk" Jan 27 07:57:35 crc kubenswrapper[4787]: I0127 07:57:35.628700 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/56eb4b72-a8dc-4300-882e-e29d83442af5-utilities\") pod \"certified-operators-bjssk\" (UID: \"56eb4b72-a8dc-4300-882e-e29d83442af5\") " pod="openshift-marketplace/certified-operators-bjssk" Jan 27 07:57:35 crc kubenswrapper[4787]: I0127 07:57:35.730309 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9xn5x\" (UniqueName: \"kubernetes.io/projected/56eb4b72-a8dc-4300-882e-e29d83442af5-kube-api-access-9xn5x\") pod \"certified-operators-bjssk\" (UID: \"56eb4b72-a8dc-4300-882e-e29d83442af5\") " pod="openshift-marketplace/certified-operators-bjssk" Jan 27 07:57:35 crc kubenswrapper[4787]: I0127 07:57:35.730389 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/56eb4b72-a8dc-4300-882e-e29d83442af5-utilities\") pod \"certified-operators-bjssk\" (UID: \"56eb4b72-a8dc-4300-882e-e29d83442af5\") " pod="openshift-marketplace/certified-operators-bjssk" Jan 27 07:57:35 crc kubenswrapper[4787]: I0127 07:57:35.730485 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/56eb4b72-a8dc-4300-882e-e29d83442af5-catalog-content\") pod \"certified-operators-bjssk\" (UID: \"56eb4b72-a8dc-4300-882e-e29d83442af5\") " pod="openshift-marketplace/certified-operators-bjssk" Jan 27 07:57:35 crc kubenswrapper[4787]: I0127 07:57:35.731039 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/56eb4b72-a8dc-4300-882e-e29d83442af5-catalog-content\") pod \"certified-operators-bjssk\" (UID: \"56eb4b72-a8dc-4300-882e-e29d83442af5\") " pod="openshift-marketplace/certified-operators-bjssk" Jan 27 07:57:35 crc kubenswrapper[4787]: I0127 07:57:35.732182 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/56eb4b72-a8dc-4300-882e-e29d83442af5-utilities\") pod \"certified-operators-bjssk\" (UID: \"56eb4b72-a8dc-4300-882e-e29d83442af5\") " pod="openshift-marketplace/certified-operators-bjssk" Jan 27 07:57:35 crc kubenswrapper[4787]: I0127 07:57:35.760935 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9xn5x\" (UniqueName: \"kubernetes.io/projected/56eb4b72-a8dc-4300-882e-e29d83442af5-kube-api-access-9xn5x\") pod 
\"certified-operators-bjssk\" (UID: \"56eb4b72-a8dc-4300-882e-e29d83442af5\") " pod="openshift-marketplace/certified-operators-bjssk" Jan 27 07:57:35 crc kubenswrapper[4787]: I0127 07:57:35.807673 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-bjssk" Jan 27 07:57:35 crc kubenswrapper[4787]: I0127 07:57:35.839471 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-8bz6z"] Jan 27 07:57:35 crc kubenswrapper[4787]: I0127 07:57:35.975030 4787 generic.go:334] "Generic (PLEG): container finished" podID="b6daca56-9e67-41d1-80da-c213717daead" containerID="452a6e75cc64f0f6e72889906fdf691f86ceddac279d22cf78027918918f427b" exitCode=0 Jan 27 07:57:35 crc kubenswrapper[4787]: I0127 07:57:35.975117 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-rdpsb" event={"ID":"b6daca56-9e67-41d1-80da-c213717daead","Type":"ContainerDied","Data":"452a6e75cc64f0f6e72889906fdf691f86ceddac279d22cf78027918918f427b"} Jan 27 07:57:35 crc kubenswrapper[4787]: I0127 07:57:35.981799 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-8bz6z" event={"ID":"3c2c8464-ea72-46ca-a23d-ae23e6617ce8","Type":"ContainerStarted","Data":"1a55db6079e4e2a4a90bebff02db8967f3bdc067060b1b8a2a9ef07821d7ec0e"} Jan 27 07:57:35 crc kubenswrapper[4787]: I0127 07:57:35.987047 4787 generic.go:334] "Generic (PLEG): container finished" podID="6953906d-8a35-4d3e-83a3-3a7451e834cc" containerID="87c497327bb297ed101f098db88efc948651c4ea93ebe1b661e3bbbd9fe3b87e" exitCode=0 Jan 27 07:57:35 crc kubenswrapper[4787]: I0127 07:57:35.987110 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-gjx52" event={"ID":"6953906d-8a35-4d3e-83a3-3a7451e834cc","Type":"ContainerDied","Data":"87c497327bb297ed101f098db88efc948651c4ea93ebe1b661e3bbbd9fe3b87e"} Jan 27 07:57:36 crc kubenswrapper[4787]: I0127 07:57:36.015943 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-bjssk"] Jan 27 07:57:36 crc kubenswrapper[4787]: W0127 07:57:36.099681 4787 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod56eb4b72_a8dc_4300_882e_e29d83442af5.slice/crio-26ad75d0b6e5df9310ef03340acda404c5e435f68b2579f3d72291bcb30d5c0e WatchSource:0}: Error finding container 26ad75d0b6e5df9310ef03340acda404c5e435f68b2579f3d72291bcb30d5c0e: Status 404 returned error can't find the container with id 26ad75d0b6e5df9310ef03340acda404c5e435f68b2579f3d72291bcb30d5c0e Jan 27 07:57:36 crc kubenswrapper[4787]: I0127 07:57:36.995690 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-gjx52" event={"ID":"6953906d-8a35-4d3e-83a3-3a7451e834cc","Type":"ContainerStarted","Data":"3ba512f602a55355a937d1e0176c09fa5d943aae8d30ab63c2d2b4257d1ab4d3"} Jan 27 07:57:36 crc kubenswrapper[4787]: I0127 07:57:36.996756 4787 generic.go:334] "Generic (PLEG): container finished" podID="56eb4b72-a8dc-4300-882e-e29d83442af5" containerID="d3ea1a5612859ddae609b3ec66783a57dbe865e2cdb441b8e18db1598e6abe3b" exitCode=0 Jan 27 07:57:36 crc kubenswrapper[4787]: I0127 07:57:36.996813 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-bjssk" 
event={"ID":"56eb4b72-a8dc-4300-882e-e29d83442af5","Type":"ContainerDied","Data":"d3ea1a5612859ddae609b3ec66783a57dbe865e2cdb441b8e18db1598e6abe3b"} Jan 27 07:57:36 crc kubenswrapper[4787]: I0127 07:57:36.996839 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-bjssk" event={"ID":"56eb4b72-a8dc-4300-882e-e29d83442af5","Type":"ContainerStarted","Data":"26ad75d0b6e5df9310ef03340acda404c5e435f68b2579f3d72291bcb30d5c0e"} Jan 27 07:57:36 crc kubenswrapper[4787]: I0127 07:57:36.999776 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-rdpsb" event={"ID":"b6daca56-9e67-41d1-80da-c213717daead","Type":"ContainerStarted","Data":"7d0d94088a216e93ead0da66f75cca6ceb6f5d7752af44e0199239794d47a603"} Jan 27 07:57:37 crc kubenswrapper[4787]: I0127 07:57:37.004770 4787 generic.go:334] "Generic (PLEG): container finished" podID="3c2c8464-ea72-46ca-a23d-ae23e6617ce8" containerID="d22d20ce2f8b93a65bfab82a4cce35acf4607e4bdf62e7094833f70e242b2c6e" exitCode=0 Jan 27 07:57:37 crc kubenswrapper[4787]: I0127 07:57:37.004815 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-8bz6z" event={"ID":"3c2c8464-ea72-46ca-a23d-ae23e6617ce8","Type":"ContainerDied","Data":"d22d20ce2f8b93a65bfab82a4cce35acf4607e4bdf62e7094833f70e242b2c6e"} Jan 27 07:57:37 crc kubenswrapper[4787]: I0127 07:57:37.027803 4787 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-gjx52" podStartSLOduration=2.281031369 podStartE2EDuration="5.027782652s" podCreationTimestamp="2026-01-27 07:57:32 +0000 UTC" firstStartedPulling="2026-01-27 07:57:33.952263828 +0000 UTC m=+359.604619340" lastFinishedPulling="2026-01-27 07:57:36.699015131 +0000 UTC m=+362.351370623" observedRunningTime="2026-01-27 07:57:37.024033935 +0000 UTC m=+362.676389427" watchObservedRunningTime="2026-01-27 07:57:37.027782652 +0000 UTC m=+362.680138144" Jan 27 07:57:37 crc kubenswrapper[4787]: I0127 07:57:37.069122 4787 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-rdpsb" podStartSLOduration=1.6315764320000001 podStartE2EDuration="4.069103309s" podCreationTimestamp="2026-01-27 07:57:33 +0000 UTC" firstStartedPulling="2026-01-27 07:57:33.961488666 +0000 UTC m=+359.613844158" lastFinishedPulling="2026-01-27 07:57:36.399015543 +0000 UTC m=+362.051371035" observedRunningTime="2026-01-27 07:57:37.066393229 +0000 UTC m=+362.718748721" watchObservedRunningTime="2026-01-27 07:57:37.069103309 +0000 UTC m=+362.721458801" Jan 27 07:57:38 crc kubenswrapper[4787]: I0127 07:57:38.012005 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-8bz6z" event={"ID":"3c2c8464-ea72-46ca-a23d-ae23e6617ce8","Type":"ContainerStarted","Data":"41cdb4514795bd2b27764d78141be18c4c2c76a4058e675c9a517264c45ac8ad"} Jan 27 07:57:38 crc kubenswrapper[4787]: I0127 07:57:38.014881 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-bjssk" event={"ID":"56eb4b72-a8dc-4300-882e-e29d83442af5","Type":"ContainerStarted","Data":"f44e6cb9089ace99223ebb1f10a8492aee7f149bbb8b863b35bedd0766f59398"} Jan 27 07:57:39 crc kubenswrapper[4787]: I0127 07:57:39.025007 4787 generic.go:334] "Generic (PLEG): container finished" podID="56eb4b72-a8dc-4300-882e-e29d83442af5" containerID="f44e6cb9089ace99223ebb1f10a8492aee7f149bbb8b863b35bedd0766f59398" exitCode=0 Jan 27 07:57:39 
crc kubenswrapper[4787]: I0127 07:57:39.025128 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-bjssk" event={"ID":"56eb4b72-a8dc-4300-882e-e29d83442af5","Type":"ContainerDied","Data":"f44e6cb9089ace99223ebb1f10a8492aee7f149bbb8b863b35bedd0766f59398"} Jan 27 07:57:39 crc kubenswrapper[4787]: I0127 07:57:39.028660 4787 generic.go:334] "Generic (PLEG): container finished" podID="3c2c8464-ea72-46ca-a23d-ae23e6617ce8" containerID="41cdb4514795bd2b27764d78141be18c4c2c76a4058e675c9a517264c45ac8ad" exitCode=0 Jan 27 07:57:39 crc kubenswrapper[4787]: I0127 07:57:39.028717 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-8bz6z" event={"ID":"3c2c8464-ea72-46ca-a23d-ae23e6617ce8","Type":"ContainerDied","Data":"41cdb4514795bd2b27764d78141be18c4c2c76a4058e675c9a517264c45ac8ad"} Jan 27 07:57:40 crc kubenswrapper[4787]: I0127 07:57:40.057466 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-8bz6z" event={"ID":"3c2c8464-ea72-46ca-a23d-ae23e6617ce8","Type":"ContainerStarted","Data":"66c4152080012d9a86134e18a8ea684c2b308eb6bfb64b1c394cbf5e8914e450"} Jan 27 07:57:40 crc kubenswrapper[4787]: I0127 07:57:40.077045 4787 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-8bz6z" podStartSLOduration=2.404908954 podStartE2EDuration="5.077004828s" podCreationTimestamp="2026-01-27 07:57:35 +0000 UTC" firstStartedPulling="2026-01-27 07:57:37.006569495 +0000 UTC m=+362.658924987" lastFinishedPulling="2026-01-27 07:57:39.678665359 +0000 UTC m=+365.331020861" observedRunningTime="2026-01-27 07:57:40.074681588 +0000 UTC m=+365.727037090" watchObservedRunningTime="2026-01-27 07:57:40.077004828 +0000 UTC m=+365.729360320" Jan 27 07:57:40 crc kubenswrapper[4787]: I0127 07:57:40.077914 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-bjssk" event={"ID":"56eb4b72-a8dc-4300-882e-e29d83442af5","Type":"ContainerStarted","Data":"eefecb3bb6d66c896447432f595d922732a7f7c819e2dd97bafc99e41a7c5a27"} Jan 27 07:57:40 crc kubenswrapper[4787]: I0127 07:57:40.100607 4787 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-bjssk" podStartSLOduration=2.6588983219999998 podStartE2EDuration="5.100583996s" podCreationTimestamp="2026-01-27 07:57:35 +0000 UTC" firstStartedPulling="2026-01-27 07:57:36.998784803 +0000 UTC m=+362.651140295" lastFinishedPulling="2026-01-27 07:57:39.440470477 +0000 UTC m=+365.092825969" observedRunningTime="2026-01-27 07:57:40.097660441 +0000 UTC m=+365.750015953" watchObservedRunningTime="2026-01-27 07:57:40.100583996 +0000 UTC m=+365.752939498" Jan 27 07:57:43 crc kubenswrapper[4787]: I0127 07:57:43.200099 4787 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-gjx52" Jan 27 07:57:43 crc kubenswrapper[4787]: I0127 07:57:43.200703 4787 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-gjx52" Jan 27 07:57:43 crc kubenswrapper[4787]: I0127 07:57:43.259406 4787 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-gjx52" Jan 27 07:57:43 crc kubenswrapper[4787]: I0127 07:57:43.393434 4787 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-rdpsb" 
Jan 27 07:57:43 crc kubenswrapper[4787]: I0127 07:57:43.393945 4787 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-rdpsb" Jan 27 07:57:43 crc kubenswrapper[4787]: I0127 07:57:43.462124 4787 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-rdpsb" Jan 27 07:57:43 crc kubenswrapper[4787]: I0127 07:57:43.772404 4787 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-ln8k8"] Jan 27 07:57:43 crc kubenswrapper[4787]: I0127 07:57:43.773279 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-ln8k8" Jan 27 07:57:43 crc kubenswrapper[4787]: I0127 07:57:43.842304 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-ln8k8"] Jan 27 07:57:43 crc kubenswrapper[4787]: I0127 07:57:43.870756 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/c2a764dc-d301-4cef-aa6d-67f785db5868-trusted-ca\") pod \"image-registry-66df7c8f76-ln8k8\" (UID: \"c2a764dc-d301-4cef-aa6d-67f785db5868\") " pod="openshift-image-registry/image-registry-66df7c8f76-ln8k8" Jan 27 07:57:43 crc kubenswrapper[4787]: I0127 07:57:43.870840 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/c2a764dc-d301-4cef-aa6d-67f785db5868-ca-trust-extracted\") pod \"image-registry-66df7c8f76-ln8k8\" (UID: \"c2a764dc-d301-4cef-aa6d-67f785db5868\") " pod="openshift-image-registry/image-registry-66df7c8f76-ln8k8" Jan 27 07:57:43 crc kubenswrapper[4787]: I0127 07:57:43.870878 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cfbsj\" (UniqueName: \"kubernetes.io/projected/c2a764dc-d301-4cef-aa6d-67f785db5868-kube-api-access-cfbsj\") pod \"image-registry-66df7c8f76-ln8k8\" (UID: \"c2a764dc-d301-4cef-aa6d-67f785db5868\") " pod="openshift-image-registry/image-registry-66df7c8f76-ln8k8" Jan 27 07:57:43 crc kubenswrapper[4787]: I0127 07:57:43.871081 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/c2a764dc-d301-4cef-aa6d-67f785db5868-installation-pull-secrets\") pod \"image-registry-66df7c8f76-ln8k8\" (UID: \"c2a764dc-d301-4cef-aa6d-67f785db5868\") " pod="openshift-image-registry/image-registry-66df7c8f76-ln8k8" Jan 27 07:57:43 crc kubenswrapper[4787]: I0127 07:57:43.871136 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/c2a764dc-d301-4cef-aa6d-67f785db5868-registry-tls\") pod \"image-registry-66df7c8f76-ln8k8\" (UID: \"c2a764dc-d301-4cef-aa6d-67f785db5868\") " pod="openshift-image-registry/image-registry-66df7c8f76-ln8k8" Jan 27 07:57:43 crc kubenswrapper[4787]: I0127 07:57:43.871175 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/c2a764dc-d301-4cef-aa6d-67f785db5868-registry-certificates\") pod \"image-registry-66df7c8f76-ln8k8\" (UID: \"c2a764dc-d301-4cef-aa6d-67f785db5868\") " pod="openshift-image-registry/image-registry-66df7c8f76-ln8k8" 
Jan 27 07:57:43 crc kubenswrapper[4787]: I0127 07:57:43.871206 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/c2a764dc-d301-4cef-aa6d-67f785db5868-bound-sa-token\") pod \"image-registry-66df7c8f76-ln8k8\" (UID: \"c2a764dc-d301-4cef-aa6d-67f785db5868\") " pod="openshift-image-registry/image-registry-66df7c8f76-ln8k8" Jan 27 07:57:43 crc kubenswrapper[4787]: I0127 07:57:43.871294 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-ln8k8\" (UID: \"c2a764dc-d301-4cef-aa6d-67f785db5868\") " pod="openshift-image-registry/image-registry-66df7c8f76-ln8k8" Jan 27 07:57:43 crc kubenswrapper[4787]: I0127 07:57:43.896382 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-ln8k8\" (UID: \"c2a764dc-d301-4cef-aa6d-67f785db5868\") " pod="openshift-image-registry/image-registry-66df7c8f76-ln8k8" Jan 27 07:57:43 crc kubenswrapper[4787]: I0127 07:57:43.973088 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/c2a764dc-d301-4cef-aa6d-67f785db5868-trusted-ca\") pod \"image-registry-66df7c8f76-ln8k8\" (UID: \"c2a764dc-d301-4cef-aa6d-67f785db5868\") " pod="openshift-image-registry/image-registry-66df7c8f76-ln8k8" Jan 27 07:57:43 crc kubenswrapper[4787]: I0127 07:57:43.973233 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/c2a764dc-d301-4cef-aa6d-67f785db5868-ca-trust-extracted\") pod \"image-registry-66df7c8f76-ln8k8\" (UID: \"c2a764dc-d301-4cef-aa6d-67f785db5868\") " pod="openshift-image-registry/image-registry-66df7c8f76-ln8k8" Jan 27 07:57:43 crc kubenswrapper[4787]: I0127 07:57:43.973275 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cfbsj\" (UniqueName: \"kubernetes.io/projected/c2a764dc-d301-4cef-aa6d-67f785db5868-kube-api-access-cfbsj\") pod \"image-registry-66df7c8f76-ln8k8\" (UID: \"c2a764dc-d301-4cef-aa6d-67f785db5868\") " pod="openshift-image-registry/image-registry-66df7c8f76-ln8k8" Jan 27 07:57:43 crc kubenswrapper[4787]: I0127 07:57:43.973306 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/c2a764dc-d301-4cef-aa6d-67f785db5868-installation-pull-secrets\") pod \"image-registry-66df7c8f76-ln8k8\" (UID: \"c2a764dc-d301-4cef-aa6d-67f785db5868\") " pod="openshift-image-registry/image-registry-66df7c8f76-ln8k8" Jan 27 07:57:43 crc kubenswrapper[4787]: I0127 07:57:43.973350 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/c2a764dc-d301-4cef-aa6d-67f785db5868-registry-tls\") pod \"image-registry-66df7c8f76-ln8k8\" (UID: \"c2a764dc-d301-4cef-aa6d-67f785db5868\") " pod="openshift-image-registry/image-registry-66df7c8f76-ln8k8" Jan 27 07:57:43 crc kubenswrapper[4787]: I0127 07:57:43.973400 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/c2a764dc-d301-4cef-aa6d-67f785db5868-registry-certificates\") pod \"image-registry-66df7c8f76-ln8k8\" (UID: \"c2a764dc-d301-4cef-aa6d-67f785db5868\") " pod="openshift-image-registry/image-registry-66df7c8f76-ln8k8" Jan 27 07:57:43 crc kubenswrapper[4787]: I0127 07:57:43.973429 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/c2a764dc-d301-4cef-aa6d-67f785db5868-bound-sa-token\") pod \"image-registry-66df7c8f76-ln8k8\" (UID: \"c2a764dc-d301-4cef-aa6d-67f785db5868\") " pod="openshift-image-registry/image-registry-66df7c8f76-ln8k8" Jan 27 07:57:43 crc kubenswrapper[4787]: I0127 07:57:43.974184 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/c2a764dc-d301-4cef-aa6d-67f785db5868-ca-trust-extracted\") pod \"image-registry-66df7c8f76-ln8k8\" (UID: \"c2a764dc-d301-4cef-aa6d-67f785db5868\") " pod="openshift-image-registry/image-registry-66df7c8f76-ln8k8" Jan 27 07:57:43 crc kubenswrapper[4787]: I0127 07:57:43.974861 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/c2a764dc-d301-4cef-aa6d-67f785db5868-trusted-ca\") pod \"image-registry-66df7c8f76-ln8k8\" (UID: \"c2a764dc-d301-4cef-aa6d-67f785db5868\") " pod="openshift-image-registry/image-registry-66df7c8f76-ln8k8" Jan 27 07:57:43 crc kubenswrapper[4787]: I0127 07:57:43.975432 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/c2a764dc-d301-4cef-aa6d-67f785db5868-registry-certificates\") pod \"image-registry-66df7c8f76-ln8k8\" (UID: \"c2a764dc-d301-4cef-aa6d-67f785db5868\") " pod="openshift-image-registry/image-registry-66df7c8f76-ln8k8" Jan 27 07:57:43 crc kubenswrapper[4787]: I0127 07:57:43.980747 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/c2a764dc-d301-4cef-aa6d-67f785db5868-registry-tls\") pod \"image-registry-66df7c8f76-ln8k8\" (UID: \"c2a764dc-d301-4cef-aa6d-67f785db5868\") " pod="openshift-image-registry/image-registry-66df7c8f76-ln8k8" Jan 27 07:57:43 crc kubenswrapper[4787]: I0127 07:57:43.981250 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/c2a764dc-d301-4cef-aa6d-67f785db5868-installation-pull-secrets\") pod \"image-registry-66df7c8f76-ln8k8\" (UID: \"c2a764dc-d301-4cef-aa6d-67f785db5868\") " pod="openshift-image-registry/image-registry-66df7c8f76-ln8k8" Jan 27 07:57:43 crc kubenswrapper[4787]: I0127 07:57:43.992326 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/c2a764dc-d301-4cef-aa6d-67f785db5868-bound-sa-token\") pod \"image-registry-66df7c8f76-ln8k8\" (UID: \"c2a764dc-d301-4cef-aa6d-67f785db5868\") " pod="openshift-image-registry/image-registry-66df7c8f76-ln8k8" Jan 27 07:57:43 crc kubenswrapper[4787]: I0127 07:57:43.994043 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cfbsj\" (UniqueName: \"kubernetes.io/projected/c2a764dc-d301-4cef-aa6d-67f785db5868-kube-api-access-cfbsj\") pod \"image-registry-66df7c8f76-ln8k8\" (UID: \"c2a764dc-d301-4cef-aa6d-67f785db5868\") " pod="openshift-image-registry/image-registry-66df7c8f76-ln8k8" Jan 27 
07:57:44 crc kubenswrapper[4787]: I0127 07:57:44.091013 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-ln8k8" Jan 27 07:57:44 crc kubenswrapper[4787]: I0127 07:57:44.164652 4787 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-rdpsb" Jan 27 07:57:44 crc kubenswrapper[4787]: I0127 07:57:44.165787 4787 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-gjx52" Jan 27 07:57:44 crc kubenswrapper[4787]: I0127 07:57:44.334233 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-ln8k8"] Jan 27 07:57:45 crc kubenswrapper[4787]: I0127 07:57:45.128412 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-ln8k8" event={"ID":"c2a764dc-d301-4cef-aa6d-67f785db5868","Type":"ContainerStarted","Data":"cc31b61aca8e41f56ff29d382be7ae8a36f77b0b3ed3f42f7d94539de9163981"} Jan 27 07:57:45 crc kubenswrapper[4787]: I0127 07:57:45.129160 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-ln8k8" event={"ID":"c2a764dc-d301-4cef-aa6d-67f785db5868","Type":"ContainerStarted","Data":"8acde4aaed8887ff89bc1532c729baf24ed884982074e088d86cc2476e606b1d"} Jan 27 07:57:45 crc kubenswrapper[4787]: I0127 07:57:45.156729 4787 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-66df7c8f76-ln8k8" podStartSLOduration=2.156708876 podStartE2EDuration="2.156708876s" podCreationTimestamp="2026-01-27 07:57:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-27 07:57:45.150896815 +0000 UTC m=+370.803252327" watchObservedRunningTime="2026-01-27 07:57:45.156708876 +0000 UTC m=+370.809064358" Jan 27 07:57:45 crc kubenswrapper[4787]: I0127 07:57:45.616941 4787 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-8bz6z" Jan 27 07:57:45 crc kubenswrapper[4787]: I0127 07:57:45.617016 4787 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-8bz6z" Jan 27 07:57:45 crc kubenswrapper[4787]: I0127 07:57:45.807942 4787 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-bjssk" Jan 27 07:57:45 crc kubenswrapper[4787]: I0127 07:57:45.808038 4787 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-bjssk" Jan 27 07:57:45 crc kubenswrapper[4787]: I0127 07:57:45.857965 4787 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-bjssk" Jan 27 07:57:46 crc kubenswrapper[4787]: I0127 07:57:46.134473 4787 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-66df7c8f76-ln8k8" Jan 27 07:57:46 crc kubenswrapper[4787]: I0127 07:57:46.188449 4787 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-bjssk" Jan 27 07:57:46 crc kubenswrapper[4787]: I0127 07:57:46.665381 4787 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-8bz6z" podUID="3c2c8464-ea72-46ca-a23d-ae23e6617ce8" 
containerName="registry-server" probeResult="failure" output=< Jan 27 07:57:46 crc kubenswrapper[4787]: timeout: failed to connect service ":50051" within 1s Jan 27 07:57:46 crc kubenswrapper[4787]: > Jan 27 07:57:52 crc kubenswrapper[4787]: I0127 07:57:52.822855 4787 patch_prober.go:28] interesting pod/machine-config-daemon-q4fh5 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 27 07:57:52 crc kubenswrapper[4787]: I0127 07:57:52.823740 4787 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-q4fh5" podUID="f051e184-acac-47cf-9e04-9df648288715" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 27 07:57:55 crc kubenswrapper[4787]: I0127 07:57:55.661087 4787 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-8bz6z" Jan 27 07:57:55 crc kubenswrapper[4787]: I0127 07:57:55.714863 4787 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-8bz6z" Jan 27 07:58:04 crc kubenswrapper[4787]: I0127 07:58:04.099276 4787 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-66df7c8f76-ln8k8" Jan 27 07:58:04 crc kubenswrapper[4787]: I0127 07:58:04.163633 4787 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-d44bj"] Jan 27 07:58:22 crc kubenswrapper[4787]: I0127 07:58:22.823731 4787 patch_prober.go:28] interesting pod/machine-config-daemon-q4fh5 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 27 07:58:22 crc kubenswrapper[4787]: I0127 07:58:22.824979 4787 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-q4fh5" podUID="f051e184-acac-47cf-9e04-9df648288715" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 27 07:58:29 crc kubenswrapper[4787]: I0127 07:58:29.208682 4787 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-image-registry/image-registry-697d97f7c8-d44bj" podUID="5bd6837c-65d0-40c3-9cbc-d05d4f1e93d0" containerName="registry" containerID="cri-o://661df021152bc7f98a651ccf560f2bb03e44a6e4242d1b20ac64c7c4a614aaf1" gracePeriod=30 Jan 27 07:58:29 crc kubenswrapper[4787]: I0127 07:58:29.424526 4787 generic.go:334] "Generic (PLEG): container finished" podID="5bd6837c-65d0-40c3-9cbc-d05d4f1e93d0" containerID="661df021152bc7f98a651ccf560f2bb03e44a6e4242d1b20ac64c7c4a614aaf1" exitCode=0 Jan 27 07:58:29 crc kubenswrapper[4787]: I0127 07:58:29.424620 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-d44bj" event={"ID":"5bd6837c-65d0-40c3-9cbc-d05d4f1e93d0","Type":"ContainerDied","Data":"661df021152bc7f98a651ccf560f2bb03e44a6e4242d1b20ac64c7c4a614aaf1"} Jan 27 07:58:29 crc kubenswrapper[4787]: I0127 07:58:29.564212 4787 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-d44bj" Jan 27 07:58:29 crc kubenswrapper[4787]: I0127 07:58:29.656697 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/5bd6837c-65d0-40c3-9cbc-d05d4f1e93d0-registry-tls\") pod \"5bd6837c-65d0-40c3-9cbc-d05d4f1e93d0\" (UID: \"5bd6837c-65d0-40c3-9cbc-d05d4f1e93d0\") " Jan 27 07:58:29 crc kubenswrapper[4787]: I0127 07:58:29.656751 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/5bd6837c-65d0-40c3-9cbc-d05d4f1e93d0-bound-sa-token\") pod \"5bd6837c-65d0-40c3-9cbc-d05d4f1e93d0\" (UID: \"5bd6837c-65d0-40c3-9cbc-d05d4f1e93d0\") " Jan 27 07:58:29 crc kubenswrapper[4787]: I0127 07:58:29.656985 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-storage\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"5bd6837c-65d0-40c3-9cbc-d05d4f1e93d0\" (UID: \"5bd6837c-65d0-40c3-9cbc-d05d4f1e93d0\") " Jan 27 07:58:29 crc kubenswrapper[4787]: I0127 07:58:29.657031 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/5bd6837c-65d0-40c3-9cbc-d05d4f1e93d0-installation-pull-secrets\") pod \"5bd6837c-65d0-40c3-9cbc-d05d4f1e93d0\" (UID: \"5bd6837c-65d0-40c3-9cbc-d05d4f1e93d0\") " Jan 27 07:58:29 crc kubenswrapper[4787]: I0127 07:58:29.657079 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sd9pb\" (UniqueName: \"kubernetes.io/projected/5bd6837c-65d0-40c3-9cbc-d05d4f1e93d0-kube-api-access-sd9pb\") pod \"5bd6837c-65d0-40c3-9cbc-d05d4f1e93d0\" (UID: \"5bd6837c-65d0-40c3-9cbc-d05d4f1e93d0\") " Jan 27 07:58:29 crc kubenswrapper[4787]: I0127 07:58:29.657123 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/5bd6837c-65d0-40c3-9cbc-d05d4f1e93d0-registry-certificates\") pod \"5bd6837c-65d0-40c3-9cbc-d05d4f1e93d0\" (UID: \"5bd6837c-65d0-40c3-9cbc-d05d4f1e93d0\") " Jan 27 07:58:29 crc kubenswrapper[4787]: I0127 07:58:29.657171 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/5bd6837c-65d0-40c3-9cbc-d05d4f1e93d0-trusted-ca\") pod \"5bd6837c-65d0-40c3-9cbc-d05d4f1e93d0\" (UID: \"5bd6837c-65d0-40c3-9cbc-d05d4f1e93d0\") " Jan 27 07:58:29 crc kubenswrapper[4787]: I0127 07:58:29.657237 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/5bd6837c-65d0-40c3-9cbc-d05d4f1e93d0-ca-trust-extracted\") pod \"5bd6837c-65d0-40c3-9cbc-d05d4f1e93d0\" (UID: \"5bd6837c-65d0-40c3-9cbc-d05d4f1e93d0\") " Jan 27 07:58:29 crc kubenswrapper[4787]: I0127 07:58:29.658204 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5bd6837c-65d0-40c3-9cbc-d05d4f1e93d0-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "5bd6837c-65d0-40c3-9cbc-d05d4f1e93d0" (UID: "5bd6837c-65d0-40c3-9cbc-d05d4f1e93d0"). InnerVolumeSpecName "registry-certificates". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 27 07:58:29 crc kubenswrapper[4787]: I0127 07:58:29.658372 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5bd6837c-65d0-40c3-9cbc-d05d4f1e93d0-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "5bd6837c-65d0-40c3-9cbc-d05d4f1e93d0" (UID: "5bd6837c-65d0-40c3-9cbc-d05d4f1e93d0"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 27 07:58:29 crc kubenswrapper[4787]: I0127 07:58:29.664739 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5bd6837c-65d0-40c3-9cbc-d05d4f1e93d0-kube-api-access-sd9pb" (OuterVolumeSpecName: "kube-api-access-sd9pb") pod "5bd6837c-65d0-40c3-9cbc-d05d4f1e93d0" (UID: "5bd6837c-65d0-40c3-9cbc-d05d4f1e93d0"). InnerVolumeSpecName "kube-api-access-sd9pb". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 27 07:58:29 crc kubenswrapper[4787]: I0127 07:58:29.664737 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5bd6837c-65d0-40c3-9cbc-d05d4f1e93d0-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "5bd6837c-65d0-40c3-9cbc-d05d4f1e93d0" (UID: "5bd6837c-65d0-40c3-9cbc-d05d4f1e93d0"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 27 07:58:29 crc kubenswrapper[4787]: I0127 07:58:29.665181 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5bd6837c-65d0-40c3-9cbc-d05d4f1e93d0-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "5bd6837c-65d0-40c3-9cbc-d05d4f1e93d0" (UID: "5bd6837c-65d0-40c3-9cbc-d05d4f1e93d0"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 27 07:58:29 crc kubenswrapper[4787]: I0127 07:58:29.666310 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5bd6837c-65d0-40c3-9cbc-d05d4f1e93d0-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "5bd6837c-65d0-40c3-9cbc-d05d4f1e93d0" (UID: "5bd6837c-65d0-40c3-9cbc-d05d4f1e93d0"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 27 07:58:29 crc kubenswrapper[4787]: I0127 07:58:29.671233 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "registry-storage") pod "5bd6837c-65d0-40c3-9cbc-d05d4f1e93d0" (UID: "5bd6837c-65d0-40c3-9cbc-d05d4f1e93d0"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". PluginName "kubernetes.io/csi", VolumeGidValue "" Jan 27 07:58:29 crc kubenswrapper[4787]: I0127 07:58:29.676160 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5bd6837c-65d0-40c3-9cbc-d05d4f1e93d0-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "5bd6837c-65d0-40c3-9cbc-d05d4f1e93d0" (UID: "5bd6837c-65d0-40c3-9cbc-d05d4f1e93d0"). InnerVolumeSpecName "ca-trust-extracted". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 27 07:58:29 crc kubenswrapper[4787]: I0127 07:58:29.758299 4787 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/5bd6837c-65d0-40c3-9cbc-d05d4f1e93d0-registry-tls\") on node \"crc\" DevicePath \"\"" Jan 27 07:58:29 crc kubenswrapper[4787]: I0127 07:58:29.758342 4787 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/5bd6837c-65d0-40c3-9cbc-d05d4f1e93d0-bound-sa-token\") on node \"crc\" DevicePath \"\"" Jan 27 07:58:29 crc kubenswrapper[4787]: I0127 07:58:29.758394 4787 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/5bd6837c-65d0-40c3-9cbc-d05d4f1e93d0-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Jan 27 07:58:29 crc kubenswrapper[4787]: I0127 07:58:29.758409 4787 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sd9pb\" (UniqueName: \"kubernetes.io/projected/5bd6837c-65d0-40c3-9cbc-d05d4f1e93d0-kube-api-access-sd9pb\") on node \"crc\" DevicePath \"\"" Jan 27 07:58:29 crc kubenswrapper[4787]: I0127 07:58:29.758420 4787 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/5bd6837c-65d0-40c3-9cbc-d05d4f1e93d0-registry-certificates\") on node \"crc\" DevicePath \"\"" Jan 27 07:58:29 crc kubenswrapper[4787]: I0127 07:58:29.758430 4787 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/5bd6837c-65d0-40c3-9cbc-d05d4f1e93d0-trusted-ca\") on node \"crc\" DevicePath \"\"" Jan 27 07:58:29 crc kubenswrapper[4787]: I0127 07:58:29.758440 4787 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/5bd6837c-65d0-40c3-9cbc-d05d4f1e93d0-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Jan 27 07:58:30 crc kubenswrapper[4787]: I0127 07:58:30.435522 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-d44bj" event={"ID":"5bd6837c-65d0-40c3-9cbc-d05d4f1e93d0","Type":"ContainerDied","Data":"f164a16895fc4826be32f420da3768fd0f293a4a904dce0ed637757e1fbcf513"} Jan 27 07:58:30 crc kubenswrapper[4787]: I0127 07:58:30.436838 4787 scope.go:117] "RemoveContainer" containerID="661df021152bc7f98a651ccf560f2bb03e44a6e4242d1b20ac64c7c4a614aaf1" Jan 27 07:58:30 crc kubenswrapper[4787]: I0127 07:58:30.435744 4787 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-d44bj" Jan 27 07:58:30 crc kubenswrapper[4787]: I0127 07:58:30.487031 4787 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-d44bj"] Jan 27 07:58:30 crc kubenswrapper[4787]: I0127 07:58:30.494132 4787 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-d44bj"] Jan 27 07:58:31 crc kubenswrapper[4787]: I0127 07:58:31.090261 4787 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5bd6837c-65d0-40c3-9cbc-d05d4f1e93d0" path="/var/lib/kubelet/pods/5bd6837c-65d0-40c3-9cbc-d05d4f1e93d0/volumes" Jan 27 07:58:52 crc kubenswrapper[4787]: I0127 07:58:52.823923 4787 patch_prober.go:28] interesting pod/machine-config-daemon-q4fh5 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 27 07:58:52 crc kubenswrapper[4787]: I0127 07:58:52.825091 4787 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-q4fh5" podUID="f051e184-acac-47cf-9e04-9df648288715" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 27 07:58:52 crc kubenswrapper[4787]: I0127 07:58:52.825179 4787 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-q4fh5" Jan 27 07:58:52 crc kubenswrapper[4787]: I0127 07:58:52.827102 4787 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"ef7962ef772271e4959fec84d91ffb697a93484f02f3f1aa59cfda3d789d7c84"} pod="openshift-machine-config-operator/machine-config-daemon-q4fh5" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 27 07:58:52 crc kubenswrapper[4787]: I0127 07:58:52.827311 4787 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-q4fh5" podUID="f051e184-acac-47cf-9e04-9df648288715" containerName="machine-config-daemon" containerID="cri-o://ef7962ef772271e4959fec84d91ffb697a93484f02f3f1aa59cfda3d789d7c84" gracePeriod=600 Jan 27 07:58:53 crc kubenswrapper[4787]: I0127 07:58:53.596283 4787 generic.go:334] "Generic (PLEG): container finished" podID="f051e184-acac-47cf-9e04-9df648288715" containerID="ef7962ef772271e4959fec84d91ffb697a93484f02f3f1aa59cfda3d789d7c84" exitCode=0 Jan 27 07:58:53 crc kubenswrapper[4787]: I0127 07:58:53.596364 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-q4fh5" event={"ID":"f051e184-acac-47cf-9e04-9df648288715","Type":"ContainerDied","Data":"ef7962ef772271e4959fec84d91ffb697a93484f02f3f1aa59cfda3d789d7c84"} Jan 27 07:58:53 crc kubenswrapper[4787]: I0127 07:58:53.597180 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-q4fh5" event={"ID":"f051e184-acac-47cf-9e04-9df648288715","Type":"ContainerStarted","Data":"e4c1a6fd72ac92bada26c4a251440b894a7362ee02ad76996c3668ab0c3d7b09"} Jan 27 07:58:53 crc kubenswrapper[4787]: I0127 07:58:53.597213 4787 scope.go:117] "RemoveContainer" 
containerID="8829867d627dfed47b988a93a9ac3e381c0bf59794b65f8e1c1e821838477748" Jan 27 08:00:00 crc kubenswrapper[4787]: I0127 08:00:00.181191 4787 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29491680-tghsn"] Jan 27 08:00:00 crc kubenswrapper[4787]: E0127 08:00:00.182183 4787 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5bd6837c-65d0-40c3-9cbc-d05d4f1e93d0" containerName="registry" Jan 27 08:00:00 crc kubenswrapper[4787]: I0127 08:00:00.182198 4787 state_mem.go:107] "Deleted CPUSet assignment" podUID="5bd6837c-65d0-40c3-9cbc-d05d4f1e93d0" containerName="registry" Jan 27 08:00:00 crc kubenswrapper[4787]: I0127 08:00:00.182288 4787 memory_manager.go:354] "RemoveStaleState removing state" podUID="5bd6837c-65d0-40c3-9cbc-d05d4f1e93d0" containerName="registry" Jan 27 08:00:00 crc kubenswrapper[4787]: I0127 08:00:00.182741 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29491680-tghsn" Jan 27 08:00:00 crc kubenswrapper[4787]: I0127 08:00:00.185186 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Jan 27 08:00:00 crc kubenswrapper[4787]: I0127 08:00:00.185537 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Jan 27 08:00:00 crc kubenswrapper[4787]: I0127 08:00:00.196453 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29491680-tghsn"] Jan 27 08:00:00 crc kubenswrapper[4787]: I0127 08:00:00.208959 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/0f2fa041-8cee-4085-be76-4eb4b5e9cdb3-config-volume\") pod \"collect-profiles-29491680-tghsn\" (UID: \"0f2fa041-8cee-4085-be76-4eb4b5e9cdb3\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29491680-tghsn" Jan 27 08:00:00 crc kubenswrapper[4787]: I0127 08:00:00.209468 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4twzb\" (UniqueName: \"kubernetes.io/projected/0f2fa041-8cee-4085-be76-4eb4b5e9cdb3-kube-api-access-4twzb\") pod \"collect-profiles-29491680-tghsn\" (UID: \"0f2fa041-8cee-4085-be76-4eb4b5e9cdb3\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29491680-tghsn" Jan 27 08:00:00 crc kubenswrapper[4787]: I0127 08:00:00.209540 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/0f2fa041-8cee-4085-be76-4eb4b5e9cdb3-secret-volume\") pod \"collect-profiles-29491680-tghsn\" (UID: \"0f2fa041-8cee-4085-be76-4eb4b5e9cdb3\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29491680-tghsn" Jan 27 08:00:00 crc kubenswrapper[4787]: I0127 08:00:00.310292 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/0f2fa041-8cee-4085-be76-4eb4b5e9cdb3-secret-volume\") pod \"collect-profiles-29491680-tghsn\" (UID: \"0f2fa041-8cee-4085-be76-4eb4b5e9cdb3\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29491680-tghsn" Jan 27 08:00:00 crc kubenswrapper[4787]: I0127 08:00:00.310375 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"config-volume\" (UniqueName: \"kubernetes.io/configmap/0f2fa041-8cee-4085-be76-4eb4b5e9cdb3-config-volume\") pod \"collect-profiles-29491680-tghsn\" (UID: \"0f2fa041-8cee-4085-be76-4eb4b5e9cdb3\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29491680-tghsn" Jan 27 08:00:00 crc kubenswrapper[4787]: I0127 08:00:00.310424 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4twzb\" (UniqueName: \"kubernetes.io/projected/0f2fa041-8cee-4085-be76-4eb4b5e9cdb3-kube-api-access-4twzb\") pod \"collect-profiles-29491680-tghsn\" (UID: \"0f2fa041-8cee-4085-be76-4eb4b5e9cdb3\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29491680-tghsn" Jan 27 08:00:00 crc kubenswrapper[4787]: I0127 08:00:00.311773 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/0f2fa041-8cee-4085-be76-4eb4b5e9cdb3-config-volume\") pod \"collect-profiles-29491680-tghsn\" (UID: \"0f2fa041-8cee-4085-be76-4eb4b5e9cdb3\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29491680-tghsn" Jan 27 08:00:00 crc kubenswrapper[4787]: I0127 08:00:00.318182 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/0f2fa041-8cee-4085-be76-4eb4b5e9cdb3-secret-volume\") pod \"collect-profiles-29491680-tghsn\" (UID: \"0f2fa041-8cee-4085-be76-4eb4b5e9cdb3\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29491680-tghsn" Jan 27 08:00:00 crc kubenswrapper[4787]: I0127 08:00:00.326783 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4twzb\" (UniqueName: \"kubernetes.io/projected/0f2fa041-8cee-4085-be76-4eb4b5e9cdb3-kube-api-access-4twzb\") pod \"collect-profiles-29491680-tghsn\" (UID: \"0f2fa041-8cee-4085-be76-4eb4b5e9cdb3\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29491680-tghsn" Jan 27 08:00:00 crc kubenswrapper[4787]: I0127 08:00:00.508331 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29491680-tghsn" Jan 27 08:00:00 crc kubenswrapper[4787]: I0127 08:00:00.723720 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29491680-tghsn"] Jan 27 08:00:01 crc kubenswrapper[4787]: I0127 08:00:01.369259 4787 generic.go:334] "Generic (PLEG): container finished" podID="0f2fa041-8cee-4085-be76-4eb4b5e9cdb3" containerID="1be0ed9c2bc25caff91582d8baee05ebb7c0bfff01b2f985dd4895d1df918858" exitCode=0 Jan 27 08:00:01 crc kubenswrapper[4787]: I0127 08:00:01.369397 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29491680-tghsn" event={"ID":"0f2fa041-8cee-4085-be76-4eb4b5e9cdb3","Type":"ContainerDied","Data":"1be0ed9c2bc25caff91582d8baee05ebb7c0bfff01b2f985dd4895d1df918858"} Jan 27 08:00:01 crc kubenswrapper[4787]: I0127 08:00:01.369793 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29491680-tghsn" event={"ID":"0f2fa041-8cee-4085-be76-4eb4b5e9cdb3","Type":"ContainerStarted","Data":"bd32da43f356f5ad22f56645e3c597f5643d85a114e4644224475a6430879cd2"} Jan 27 08:00:02 crc kubenswrapper[4787]: I0127 08:00:02.621248 4787 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29491680-tghsn" Jan 27 08:00:02 crc kubenswrapper[4787]: I0127 08:00:02.747620 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/0f2fa041-8cee-4085-be76-4eb4b5e9cdb3-secret-volume\") pod \"0f2fa041-8cee-4085-be76-4eb4b5e9cdb3\" (UID: \"0f2fa041-8cee-4085-be76-4eb4b5e9cdb3\") " Jan 27 08:00:02 crc kubenswrapper[4787]: I0127 08:00:02.747765 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/0f2fa041-8cee-4085-be76-4eb4b5e9cdb3-config-volume\") pod \"0f2fa041-8cee-4085-be76-4eb4b5e9cdb3\" (UID: \"0f2fa041-8cee-4085-be76-4eb4b5e9cdb3\") " Jan 27 08:00:02 crc kubenswrapper[4787]: I0127 08:00:02.747795 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4twzb\" (UniqueName: \"kubernetes.io/projected/0f2fa041-8cee-4085-be76-4eb4b5e9cdb3-kube-api-access-4twzb\") pod \"0f2fa041-8cee-4085-be76-4eb4b5e9cdb3\" (UID: \"0f2fa041-8cee-4085-be76-4eb4b5e9cdb3\") " Jan 27 08:00:02 crc kubenswrapper[4787]: I0127 08:00:02.748932 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0f2fa041-8cee-4085-be76-4eb4b5e9cdb3-config-volume" (OuterVolumeSpecName: "config-volume") pod "0f2fa041-8cee-4085-be76-4eb4b5e9cdb3" (UID: "0f2fa041-8cee-4085-be76-4eb4b5e9cdb3"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 27 08:00:02 crc kubenswrapper[4787]: I0127 08:00:02.753566 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0f2fa041-8cee-4085-be76-4eb4b5e9cdb3-kube-api-access-4twzb" (OuterVolumeSpecName: "kube-api-access-4twzb") pod "0f2fa041-8cee-4085-be76-4eb4b5e9cdb3" (UID: "0f2fa041-8cee-4085-be76-4eb4b5e9cdb3"). InnerVolumeSpecName "kube-api-access-4twzb". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 27 08:00:02 crc kubenswrapper[4787]: I0127 08:00:02.753595 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0f2fa041-8cee-4085-be76-4eb4b5e9cdb3-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "0f2fa041-8cee-4085-be76-4eb4b5e9cdb3" (UID: "0f2fa041-8cee-4085-be76-4eb4b5e9cdb3"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 27 08:00:02 crc kubenswrapper[4787]: I0127 08:00:02.849089 4787 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/0f2fa041-8cee-4085-be76-4eb4b5e9cdb3-secret-volume\") on node \"crc\" DevicePath \"\"" Jan 27 08:00:02 crc kubenswrapper[4787]: I0127 08:00:02.849161 4787 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/0f2fa041-8cee-4085-be76-4eb4b5e9cdb3-config-volume\") on node \"crc\" DevicePath \"\"" Jan 27 08:00:02 crc kubenswrapper[4787]: I0127 08:00:02.849192 4787 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4twzb\" (UniqueName: \"kubernetes.io/projected/0f2fa041-8cee-4085-be76-4eb4b5e9cdb3-kube-api-access-4twzb\") on node \"crc\" DevicePath \"\"" Jan 27 08:00:03 crc kubenswrapper[4787]: I0127 08:00:03.383814 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29491680-tghsn" event={"ID":"0f2fa041-8cee-4085-be76-4eb4b5e9cdb3","Type":"ContainerDied","Data":"bd32da43f356f5ad22f56645e3c597f5643d85a114e4644224475a6430879cd2"} Jan 27 08:00:03 crc kubenswrapper[4787]: I0127 08:00:03.383871 4787 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="bd32da43f356f5ad22f56645e3c597f5643d85a114e4644224475a6430879cd2" Jan 27 08:00:03 crc kubenswrapper[4787]: I0127 08:00:03.383943 4787 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29491680-tghsn" Jan 27 08:01:22 crc kubenswrapper[4787]: I0127 08:01:22.822826 4787 patch_prober.go:28] interesting pod/machine-config-daemon-q4fh5 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 27 08:01:22 crc kubenswrapper[4787]: I0127 08:01:22.824690 4787 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-q4fh5" podUID="f051e184-acac-47cf-9e04-9df648288715" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 27 08:01:52 crc kubenswrapper[4787]: I0127 08:01:52.823414 4787 patch_prober.go:28] interesting pod/machine-config-daemon-q4fh5 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 27 08:01:52 crc kubenswrapper[4787]: I0127 08:01:52.824334 4787 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-q4fh5" podUID="f051e184-acac-47cf-9e04-9df648288715" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 27 08:02:22 crc kubenswrapper[4787]: I0127 08:02:22.823967 4787 patch_prober.go:28] interesting pod/machine-config-daemon-q4fh5 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 27 08:02:22 crc kubenswrapper[4787]: I0127 08:02:22.824920 
4787 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-q4fh5" podUID="f051e184-acac-47cf-9e04-9df648288715" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 27 08:02:22 crc kubenswrapper[4787]: I0127 08:02:22.825031 4787 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-q4fh5" Jan 27 08:02:22 crc kubenswrapper[4787]: I0127 08:02:22.826326 4787 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"e4c1a6fd72ac92bada26c4a251440b894a7362ee02ad76996c3668ab0c3d7b09"} pod="openshift-machine-config-operator/machine-config-daemon-q4fh5" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 27 08:02:22 crc kubenswrapper[4787]: I0127 08:02:22.826460 4787 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-q4fh5" podUID="f051e184-acac-47cf-9e04-9df648288715" containerName="machine-config-daemon" containerID="cri-o://e4c1a6fd72ac92bada26c4a251440b894a7362ee02ad76996c3668ab0c3d7b09" gracePeriod=600 Jan 27 08:02:23 crc kubenswrapper[4787]: I0127 08:02:23.398126 4787 generic.go:334] "Generic (PLEG): container finished" podID="f051e184-acac-47cf-9e04-9df648288715" containerID="e4c1a6fd72ac92bada26c4a251440b894a7362ee02ad76996c3668ab0c3d7b09" exitCode=0 Jan 27 08:02:23 crc kubenswrapper[4787]: I0127 08:02:23.398309 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-q4fh5" event={"ID":"f051e184-acac-47cf-9e04-9df648288715","Type":"ContainerDied","Data":"e4c1a6fd72ac92bada26c4a251440b894a7362ee02ad76996c3668ab0c3d7b09"} Jan 27 08:02:23 crc kubenswrapper[4787]: I0127 08:02:23.398675 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-q4fh5" event={"ID":"f051e184-acac-47cf-9e04-9df648288715","Type":"ContainerStarted","Data":"76c11a5da51cedd24f4d664015e61242e532ce42823eb519b69423acc27d454d"} Jan 27 08:02:23 crc kubenswrapper[4787]: I0127 08:02:23.398703 4787 scope.go:117] "RemoveContainer" containerID="ef7962ef772271e4959fec84d91ffb697a93484f02f3f1aa59cfda3d789d7c84" Jan 27 08:03:18 crc kubenswrapper[4787]: I0127 08:03:18.183959 4787 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec71368s4v"] Jan 27 08:03:18 crc kubenswrapper[4787]: E0127 08:03:18.184955 4787 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0f2fa041-8cee-4085-be76-4eb4b5e9cdb3" containerName="collect-profiles" Jan 27 08:03:18 crc kubenswrapper[4787]: I0127 08:03:18.184970 4787 state_mem.go:107] "Deleted CPUSet assignment" podUID="0f2fa041-8cee-4085-be76-4eb4b5e9cdb3" containerName="collect-profiles" Jan 27 08:03:18 crc kubenswrapper[4787]: I0127 08:03:18.185101 4787 memory_manager.go:354] "RemoveStaleState removing state" podUID="0f2fa041-8cee-4085-be76-4eb4b5e9cdb3" containerName="collect-profiles" Jan 27 08:03:18 crc kubenswrapper[4787]: I0127 08:03:18.185894 4787 util.go:30] "No sandbox for pod can be found. 
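
[Editor's note] The entries above show the kubelet's HTTP liveness probe for machine-config-daemon failing with "connection refused" on http://127.0.0.1:8798/health at 07:58:52 and again at 08:02:22; each time the container is marked unhealthy, killed, and restarted (the ContainerDied / ContainerStarted pair that follows). "Connection refused" simply means nothing was accepting connections on that port at probe time. As a rough, self-contained illustration of what such a check amounts to, here is a minimal Go sketch; the port 9999 and the handler are invented for the example and are not taken from this cluster's configuration, and this is not the kubelet's actual prober code.

```go
// Minimal sketch of an HTTP liveness-style check: a tiny /health listener plus
// a GET with a timeout, where any 2xx/3xx response counts as healthy and a
// refused connection counts as a failed probe.
package main

import (
	"fmt"
	"net/http"
	"time"
)

func main() {
	// Stand-in for the daemon's health endpoint (arbitrary local port).
	http.HandleFunc("/health", func(w http.ResponseWriter, r *http.Request) {
		w.WriteHeader(http.StatusOK)
	})
	go func() { _ = http.ListenAndServe("127.0.0.1:9999", nil) }()
	time.Sleep(100 * time.Millisecond) // give the listener a moment to start

	// The probe side: one GET with a short timeout, as a liveness check would do.
	client := &http.Client{Timeout: 2 * time.Second}
	resp, err := client.Get("http://127.0.0.1:9999/health")
	if err != nil {
		// With no listener this is where "connection refused" would surface.
		fmt.Println("probe failed:", err)
		return
	}
	defer resp.Body.Close()
	if resp.StatusCode >= 200 && resp.StatusCode < 400 {
		fmt.Println("probe succeeded:", resp.Status)
	} else {
		fmt.Println("probe failed with status:", resp.Status)
	}
}
```

The exact probe settings for this pod (period, timeout, failureThreshold) are not visible in the log; a restart normally requires several consecutive failures, not a single one.
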
Need to start a new one" pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec71368s4v" Jan 27 08:03:18 crc kubenswrapper[4787]: I0127 08:03:18.188011 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Jan 27 08:03:18 crc kubenswrapper[4787]: I0127 08:03:18.199270 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec71368s4v"] Jan 27 08:03:18 crc kubenswrapper[4787]: I0127 08:03:18.320346 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-btqb4\" (UniqueName: \"kubernetes.io/projected/b105de51-099b-4b81-b99d-6aa63d8821ae-kube-api-access-btqb4\") pod \"53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec71368s4v\" (UID: \"b105de51-099b-4b81-b99d-6aa63d8821ae\") " pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec71368s4v" Jan 27 08:03:18 crc kubenswrapper[4787]: I0127 08:03:18.320404 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/b105de51-099b-4b81-b99d-6aa63d8821ae-bundle\") pod \"53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec71368s4v\" (UID: \"b105de51-099b-4b81-b99d-6aa63d8821ae\") " pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec71368s4v" Jan 27 08:03:18 crc kubenswrapper[4787]: I0127 08:03:18.320569 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/b105de51-099b-4b81-b99d-6aa63d8821ae-util\") pod \"53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec71368s4v\" (UID: \"b105de51-099b-4b81-b99d-6aa63d8821ae\") " pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec71368s4v" Jan 27 08:03:18 crc kubenswrapper[4787]: I0127 08:03:18.422781 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-btqb4\" (UniqueName: \"kubernetes.io/projected/b105de51-099b-4b81-b99d-6aa63d8821ae-kube-api-access-btqb4\") pod \"53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec71368s4v\" (UID: \"b105de51-099b-4b81-b99d-6aa63d8821ae\") " pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec71368s4v" Jan 27 08:03:18 crc kubenswrapper[4787]: I0127 08:03:18.422885 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/b105de51-099b-4b81-b99d-6aa63d8821ae-bundle\") pod \"53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec71368s4v\" (UID: \"b105de51-099b-4b81-b99d-6aa63d8821ae\") " pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec71368s4v" Jan 27 08:03:18 crc kubenswrapper[4787]: I0127 08:03:18.422952 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/b105de51-099b-4b81-b99d-6aa63d8821ae-util\") pod \"53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec71368s4v\" (UID: \"b105de51-099b-4b81-b99d-6aa63d8821ae\") " pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec71368s4v" Jan 27 08:03:18 crc kubenswrapper[4787]: I0127 08:03:18.424086 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: 
\"kubernetes.io/empty-dir/b105de51-099b-4b81-b99d-6aa63d8821ae-util\") pod \"53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec71368s4v\" (UID: \"b105de51-099b-4b81-b99d-6aa63d8821ae\") " pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec71368s4v" Jan 27 08:03:18 crc kubenswrapper[4787]: I0127 08:03:18.424146 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/b105de51-099b-4b81-b99d-6aa63d8821ae-bundle\") pod \"53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec71368s4v\" (UID: \"b105de51-099b-4b81-b99d-6aa63d8821ae\") " pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec71368s4v" Jan 27 08:03:18 crc kubenswrapper[4787]: I0127 08:03:18.464605 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-btqb4\" (UniqueName: \"kubernetes.io/projected/b105de51-099b-4b81-b99d-6aa63d8821ae-kube-api-access-btqb4\") pod \"53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec71368s4v\" (UID: \"b105de51-099b-4b81-b99d-6aa63d8821ae\") " pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec71368s4v" Jan 27 08:03:18 crc kubenswrapper[4787]: I0127 08:03:18.503338 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec71368s4v" Jan 27 08:03:18 crc kubenswrapper[4787]: I0127 08:03:18.740846 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec71368s4v"] Jan 27 08:03:18 crc kubenswrapper[4787]: W0127 08:03:18.757240 4787 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb105de51_099b_4b81_b99d_6aa63d8821ae.slice/crio-75554e486da5acb10dc400cbefb82d6888492aa052b9eeab625e4178740f5bf7 WatchSource:0}: Error finding container 75554e486da5acb10dc400cbefb82d6888492aa052b9eeab625e4178740f5bf7: Status 404 returned error can't find the container with id 75554e486da5acb10dc400cbefb82d6888492aa052b9eeab625e4178740f5bf7 Jan 27 08:03:19 crc kubenswrapper[4787]: I0127 08:03:19.788964 4787 generic.go:334] "Generic (PLEG): container finished" podID="b105de51-099b-4b81-b99d-6aa63d8821ae" containerID="a8b8f2e80477ccecfdc1b7784d1b38fffdf7c2947da74d1752084526cd690013" exitCode=0 Jan 27 08:03:19 crc kubenswrapper[4787]: I0127 08:03:19.789046 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec71368s4v" event={"ID":"b105de51-099b-4b81-b99d-6aa63d8821ae","Type":"ContainerDied","Data":"a8b8f2e80477ccecfdc1b7784d1b38fffdf7c2947da74d1752084526cd690013"} Jan 27 08:03:19 crc kubenswrapper[4787]: I0127 08:03:19.789441 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec71368s4v" event={"ID":"b105de51-099b-4b81-b99d-6aa63d8821ae","Type":"ContainerStarted","Data":"75554e486da5acb10dc400cbefb82d6888492aa052b9eeab625e4178740f5bf7"} Jan 27 08:03:19 crc kubenswrapper[4787]: I0127 08:03:19.793993 4787 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Jan 27 08:03:21 crc kubenswrapper[4787]: I0127 08:03:21.813171 4787 generic.go:334] "Generic (PLEG): container finished" podID="b105de51-099b-4b81-b99d-6aa63d8821ae" 
containerID="e7193af437019d55be4443c0b7d3c5b45c81b64b96676e31ece60553f5a9a993" exitCode=0 Jan 27 08:03:21 crc kubenswrapper[4787]: I0127 08:03:21.813758 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec71368s4v" event={"ID":"b105de51-099b-4b81-b99d-6aa63d8821ae","Type":"ContainerDied","Data":"e7193af437019d55be4443c0b7d3c5b45c81b64b96676e31ece60553f5a9a993"} Jan 27 08:03:22 crc kubenswrapper[4787]: I0127 08:03:22.826500 4787 generic.go:334] "Generic (PLEG): container finished" podID="b105de51-099b-4b81-b99d-6aa63d8821ae" containerID="03b0d8be59b6c6084adde90eb5992df7e4072a74d746be53a75159504f1c8c26" exitCode=0 Jan 27 08:03:22 crc kubenswrapper[4787]: I0127 08:03:22.826641 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec71368s4v" event={"ID":"b105de51-099b-4b81-b99d-6aa63d8821ae","Type":"ContainerDied","Data":"03b0d8be59b6c6084adde90eb5992df7e4072a74d746be53a75159504f1c8c26"} Jan 27 08:03:24 crc kubenswrapper[4787]: I0127 08:03:24.089790 4787 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec71368s4v" Jan 27 08:03:24 crc kubenswrapper[4787]: I0127 08:03:24.207045 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/b105de51-099b-4b81-b99d-6aa63d8821ae-bundle\") pod \"b105de51-099b-4b81-b99d-6aa63d8821ae\" (UID: \"b105de51-099b-4b81-b99d-6aa63d8821ae\") " Jan 27 08:03:24 crc kubenswrapper[4787]: I0127 08:03:24.207099 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/b105de51-099b-4b81-b99d-6aa63d8821ae-util\") pod \"b105de51-099b-4b81-b99d-6aa63d8821ae\" (UID: \"b105de51-099b-4b81-b99d-6aa63d8821ae\") " Jan 27 08:03:24 crc kubenswrapper[4787]: I0127 08:03:24.207253 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-btqb4\" (UniqueName: \"kubernetes.io/projected/b105de51-099b-4b81-b99d-6aa63d8821ae-kube-api-access-btqb4\") pod \"b105de51-099b-4b81-b99d-6aa63d8821ae\" (UID: \"b105de51-099b-4b81-b99d-6aa63d8821ae\") " Jan 27 08:03:24 crc kubenswrapper[4787]: I0127 08:03:24.208345 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b105de51-099b-4b81-b99d-6aa63d8821ae-bundle" (OuterVolumeSpecName: "bundle") pod "b105de51-099b-4b81-b99d-6aa63d8821ae" (UID: "b105de51-099b-4b81-b99d-6aa63d8821ae"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 27 08:03:24 crc kubenswrapper[4787]: I0127 08:03:24.221376 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b105de51-099b-4b81-b99d-6aa63d8821ae-util" (OuterVolumeSpecName: "util") pod "b105de51-099b-4b81-b99d-6aa63d8821ae" (UID: "b105de51-099b-4b81-b99d-6aa63d8821ae"). InnerVolumeSpecName "util". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 27 08:03:24 crc kubenswrapper[4787]: I0127 08:03:24.224970 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b105de51-099b-4b81-b99d-6aa63d8821ae-kube-api-access-btqb4" (OuterVolumeSpecName: "kube-api-access-btqb4") pod "b105de51-099b-4b81-b99d-6aa63d8821ae" (UID: "b105de51-099b-4b81-b99d-6aa63d8821ae"). 
InnerVolumeSpecName "kube-api-access-btqb4". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 27 08:03:24 crc kubenswrapper[4787]: I0127 08:03:24.308894 4787 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/b105de51-099b-4b81-b99d-6aa63d8821ae-bundle\") on node \"crc\" DevicePath \"\"" Jan 27 08:03:24 crc kubenswrapper[4787]: I0127 08:03:24.308934 4787 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/b105de51-099b-4b81-b99d-6aa63d8821ae-util\") on node \"crc\" DevicePath \"\"" Jan 27 08:03:24 crc kubenswrapper[4787]: I0127 08:03:24.308944 4787 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-btqb4\" (UniqueName: \"kubernetes.io/projected/b105de51-099b-4b81-b99d-6aa63d8821ae-kube-api-access-btqb4\") on node \"crc\" DevicePath \"\"" Jan 27 08:03:24 crc kubenswrapper[4787]: I0127 08:03:24.841781 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec71368s4v" event={"ID":"b105de51-099b-4b81-b99d-6aa63d8821ae","Type":"ContainerDied","Data":"75554e486da5acb10dc400cbefb82d6888492aa052b9eeab625e4178740f5bf7"} Jan 27 08:03:24 crc kubenswrapper[4787]: I0127 08:03:24.841833 4787 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="75554e486da5acb10dc400cbefb82d6888492aa052b9eeab625e4178740f5bf7" Jan 27 08:03:24 crc kubenswrapper[4787]: I0127 08:03:24.841869 4787 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec71368s4v" Jan 27 08:03:26 crc kubenswrapper[4787]: I0127 08:03:26.817275 4787 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-operator-646758c888-zxwfx"] Jan 27 08:03:26 crc kubenswrapper[4787]: E0127 08:03:26.817534 4787 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b105de51-099b-4b81-b99d-6aa63d8821ae" containerName="extract" Jan 27 08:03:26 crc kubenswrapper[4787]: I0127 08:03:26.817572 4787 state_mem.go:107] "Deleted CPUSet assignment" podUID="b105de51-099b-4b81-b99d-6aa63d8821ae" containerName="extract" Jan 27 08:03:26 crc kubenswrapper[4787]: E0127 08:03:26.817589 4787 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b105de51-099b-4b81-b99d-6aa63d8821ae" containerName="util" Jan 27 08:03:26 crc kubenswrapper[4787]: I0127 08:03:26.817596 4787 state_mem.go:107] "Deleted CPUSet assignment" podUID="b105de51-099b-4b81-b99d-6aa63d8821ae" containerName="util" Jan 27 08:03:26 crc kubenswrapper[4787]: E0127 08:03:26.817611 4787 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b105de51-099b-4b81-b99d-6aa63d8821ae" containerName="pull" Jan 27 08:03:26 crc kubenswrapper[4787]: I0127 08:03:26.817618 4787 state_mem.go:107] "Deleted CPUSet assignment" podUID="b105de51-099b-4b81-b99d-6aa63d8821ae" containerName="pull" Jan 27 08:03:26 crc kubenswrapper[4787]: I0127 08:03:26.817729 4787 memory_manager.go:354] "RemoveStaleState removing state" podUID="b105de51-099b-4b81-b99d-6aa63d8821ae" containerName="extract" Jan 27 08:03:26 crc kubenswrapper[4787]: I0127 08:03:26.818180 4787 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-operator-646758c888-zxwfx" Jan 27 08:03:26 crc kubenswrapper[4787]: I0127 08:03:26.820180 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"openshift-service-ca.crt" Jan 27 08:03:26 crc kubenswrapper[4787]: I0127 08:03:26.821306 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"nmstate-operator-dockercfg-dcpzr" Jan 27 08:03:26 crc kubenswrapper[4787]: I0127 08:03:26.822328 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"kube-root-ca.crt" Jan 27 08:03:26 crc kubenswrapper[4787]: I0127 08:03:26.835370 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-operator-646758c888-zxwfx"] Jan 27 08:03:26 crc kubenswrapper[4787]: I0127 08:03:26.847015 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9x6g5\" (UniqueName: \"kubernetes.io/projected/d9fe5568-1516-4ee4-8ca1-90f4ea58ba3e-kube-api-access-9x6g5\") pod \"nmstate-operator-646758c888-zxwfx\" (UID: \"d9fe5568-1516-4ee4-8ca1-90f4ea58ba3e\") " pod="openshift-nmstate/nmstate-operator-646758c888-zxwfx" Jan 27 08:03:26 crc kubenswrapper[4787]: I0127 08:03:26.947865 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9x6g5\" (UniqueName: \"kubernetes.io/projected/d9fe5568-1516-4ee4-8ca1-90f4ea58ba3e-kube-api-access-9x6g5\") pod \"nmstate-operator-646758c888-zxwfx\" (UID: \"d9fe5568-1516-4ee4-8ca1-90f4ea58ba3e\") " pod="openshift-nmstate/nmstate-operator-646758c888-zxwfx" Jan 27 08:03:26 crc kubenswrapper[4787]: I0127 08:03:26.967438 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9x6g5\" (UniqueName: \"kubernetes.io/projected/d9fe5568-1516-4ee4-8ca1-90f4ea58ba3e-kube-api-access-9x6g5\") pod \"nmstate-operator-646758c888-zxwfx\" (UID: \"d9fe5568-1516-4ee4-8ca1-90f4ea58ba3e\") " pod="openshift-nmstate/nmstate-operator-646758c888-zxwfx" Jan 27 08:03:27 crc kubenswrapper[4787]: I0127 08:03:27.136802 4787 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-operator-646758c888-zxwfx" Jan 27 08:03:27 crc kubenswrapper[4787]: I0127 08:03:27.351618 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-operator-646758c888-zxwfx"] Jan 27 08:03:27 crc kubenswrapper[4787]: I0127 08:03:27.435923 4787 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-7642m"] Jan 27 08:03:27 crc kubenswrapper[4787]: I0127 08:03:27.436326 4787 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-7642m" podUID="fa44405c-042c-485a-ab6c-912dcd377751" containerName="ovn-controller" containerID="cri-o://fc1a27dd4120a4eafd6bfbd908f9fdbc80e9b006afafb509399f2b657b129b2c" gracePeriod=30 Jan 27 08:03:27 crc kubenswrapper[4787]: I0127 08:03:27.436355 4787 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-7642m" podUID="fa44405c-042c-485a-ab6c-912dcd377751" containerName="nbdb" containerID="cri-o://108cdc389c2cbb9baeb538856e70ea1ca2bce5968eb4097700b3e71e5322258d" gracePeriod=30 Jan 27 08:03:27 crc kubenswrapper[4787]: I0127 08:03:27.436456 4787 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-7642m" podUID="fa44405c-042c-485a-ab6c-912dcd377751" containerName="kube-rbac-proxy-node" containerID="cri-o://b5f54aa358ddf7d9fe76fd45c4e3332ba2ef1fa4da77c6f38ae29209c4339a80" gracePeriod=30 Jan 27 08:03:27 crc kubenswrapper[4787]: I0127 08:03:27.436444 4787 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-7642m" podUID="fa44405c-042c-485a-ab6c-912dcd377751" containerName="sbdb" containerID="cri-o://d46b5c512f7b14d97a53ce0355a7ceb08370d3e91ccef003b40386a9785df413" gracePeriod=30 Jan 27 08:03:27 crc kubenswrapper[4787]: I0127 08:03:27.436468 4787 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-7642m" podUID="fa44405c-042c-485a-ab6c-912dcd377751" containerName="ovn-acl-logging" containerID="cri-o://fbc588f712cb0167029fc80f924f52841fb904738bbcf774c53ca15a2642ef9c" gracePeriod=30 Jan 27 08:03:27 crc kubenswrapper[4787]: I0127 08:03:27.436617 4787 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-7642m" podUID="fa44405c-042c-485a-ab6c-912dcd377751" containerName="kube-rbac-proxy-ovn-metrics" containerID="cri-o://2abb29a77d0081b70495701e68f0a5a9b80656eab59ae8de10fab1450a1df49f" gracePeriod=30 Jan 27 08:03:27 crc kubenswrapper[4787]: I0127 08:03:27.436521 4787 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-7642m" podUID="fa44405c-042c-485a-ab6c-912dcd377751" containerName="northd" containerID="cri-o://0bcc1a1d9f6e5bd0d8cf9b36441460debd839f92291531175eb05db5070136e8" gracePeriod=30 Jan 27 08:03:27 crc kubenswrapper[4787]: I0127 08:03:27.484868 4787 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-7642m" podUID="fa44405c-042c-485a-ab6c-912dcd377751" containerName="ovnkube-controller" containerID="cri-o://be3770d2a2e5b599b3bac6445f4cdedb5531335a40c7e1a5727aa8b323e2394c" gracePeriod=30 Jan 27 08:03:27 crc kubenswrapper[4787]: I0127 08:03:27.727878 4787 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-7642m_fa44405c-042c-485a-ab6c-912dcd377751/ovnkube-controller/3.log" Jan 27 08:03:27 crc kubenswrapper[4787]: I0127 08:03:27.730512 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-7642m_fa44405c-042c-485a-ab6c-912dcd377751/ovn-acl-logging/0.log" Jan 27 08:03:27 crc kubenswrapper[4787]: I0127 08:03:27.731099 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-7642m_fa44405c-042c-485a-ab6c-912dcd377751/ovn-controller/0.log" Jan 27 08:03:27 crc kubenswrapper[4787]: I0127 08:03:27.731638 4787 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-7642m" Jan 27 08:03:27 crc kubenswrapper[4787]: I0127 08:03:27.788674 4787 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-x2w6d"] Jan 27 08:03:27 crc kubenswrapper[4787]: E0127 08:03:27.788936 4787 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fa44405c-042c-485a-ab6c-912dcd377751" containerName="ovnkube-controller" Jan 27 08:03:27 crc kubenswrapper[4787]: I0127 08:03:27.788951 4787 state_mem.go:107] "Deleted CPUSet assignment" podUID="fa44405c-042c-485a-ab6c-912dcd377751" containerName="ovnkube-controller" Jan 27 08:03:27 crc kubenswrapper[4787]: E0127 08:03:27.788962 4787 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fa44405c-042c-485a-ab6c-912dcd377751" containerName="kube-rbac-proxy-ovn-metrics" Jan 27 08:03:27 crc kubenswrapper[4787]: I0127 08:03:27.788968 4787 state_mem.go:107] "Deleted CPUSet assignment" podUID="fa44405c-042c-485a-ab6c-912dcd377751" containerName="kube-rbac-proxy-ovn-metrics" Jan 27 08:03:27 crc kubenswrapper[4787]: E0127 08:03:27.788976 4787 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fa44405c-042c-485a-ab6c-912dcd377751" containerName="sbdb" Jan 27 08:03:27 crc kubenswrapper[4787]: I0127 08:03:27.788982 4787 state_mem.go:107] "Deleted CPUSet assignment" podUID="fa44405c-042c-485a-ab6c-912dcd377751" containerName="sbdb" Jan 27 08:03:27 crc kubenswrapper[4787]: E0127 08:03:27.788996 4787 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fa44405c-042c-485a-ab6c-912dcd377751" containerName="ovn-acl-logging" Jan 27 08:03:27 crc kubenswrapper[4787]: I0127 08:03:27.789001 4787 state_mem.go:107] "Deleted CPUSet assignment" podUID="fa44405c-042c-485a-ab6c-912dcd377751" containerName="ovn-acl-logging" Jan 27 08:03:27 crc kubenswrapper[4787]: E0127 08:03:27.789014 4787 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fa44405c-042c-485a-ab6c-912dcd377751" containerName="northd" Jan 27 08:03:27 crc kubenswrapper[4787]: I0127 08:03:27.789021 4787 state_mem.go:107] "Deleted CPUSet assignment" podUID="fa44405c-042c-485a-ab6c-912dcd377751" containerName="northd" Jan 27 08:03:27 crc kubenswrapper[4787]: E0127 08:03:27.789030 4787 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fa44405c-042c-485a-ab6c-912dcd377751" containerName="ovnkube-controller" Jan 27 08:03:27 crc kubenswrapper[4787]: I0127 08:03:27.789037 4787 state_mem.go:107] "Deleted CPUSet assignment" podUID="fa44405c-042c-485a-ab6c-912dcd377751" containerName="ovnkube-controller" Jan 27 08:03:27 crc kubenswrapper[4787]: E0127 08:03:27.789043 4787 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fa44405c-042c-485a-ab6c-912dcd377751" containerName="kube-rbac-proxy-node" Jan 27 08:03:27 
crc kubenswrapper[4787]: I0127 08:03:27.789050 4787 state_mem.go:107] "Deleted CPUSet assignment" podUID="fa44405c-042c-485a-ab6c-912dcd377751" containerName="kube-rbac-proxy-node" Jan 27 08:03:27 crc kubenswrapper[4787]: E0127 08:03:27.789058 4787 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fa44405c-042c-485a-ab6c-912dcd377751" containerName="ovnkube-controller" Jan 27 08:03:27 crc kubenswrapper[4787]: I0127 08:03:27.789065 4787 state_mem.go:107] "Deleted CPUSet assignment" podUID="fa44405c-042c-485a-ab6c-912dcd377751" containerName="ovnkube-controller" Jan 27 08:03:27 crc kubenswrapper[4787]: E0127 08:03:27.789081 4787 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fa44405c-042c-485a-ab6c-912dcd377751" containerName="ovn-controller" Jan 27 08:03:27 crc kubenswrapper[4787]: I0127 08:03:27.789089 4787 state_mem.go:107] "Deleted CPUSet assignment" podUID="fa44405c-042c-485a-ab6c-912dcd377751" containerName="ovn-controller" Jan 27 08:03:27 crc kubenswrapper[4787]: E0127 08:03:27.789097 4787 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fa44405c-042c-485a-ab6c-912dcd377751" containerName="ovnkube-controller" Jan 27 08:03:27 crc kubenswrapper[4787]: I0127 08:03:27.789105 4787 state_mem.go:107] "Deleted CPUSet assignment" podUID="fa44405c-042c-485a-ab6c-912dcd377751" containerName="ovnkube-controller" Jan 27 08:03:27 crc kubenswrapper[4787]: E0127 08:03:27.789114 4787 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fa44405c-042c-485a-ab6c-912dcd377751" containerName="kubecfg-setup" Jan 27 08:03:27 crc kubenswrapper[4787]: I0127 08:03:27.789121 4787 state_mem.go:107] "Deleted CPUSet assignment" podUID="fa44405c-042c-485a-ab6c-912dcd377751" containerName="kubecfg-setup" Jan 27 08:03:27 crc kubenswrapper[4787]: E0127 08:03:27.789130 4787 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fa44405c-042c-485a-ab6c-912dcd377751" containerName="nbdb" Jan 27 08:03:27 crc kubenswrapper[4787]: I0127 08:03:27.789137 4787 state_mem.go:107] "Deleted CPUSet assignment" podUID="fa44405c-042c-485a-ab6c-912dcd377751" containerName="nbdb" Jan 27 08:03:27 crc kubenswrapper[4787]: I0127 08:03:27.789259 4787 memory_manager.go:354] "RemoveStaleState removing state" podUID="fa44405c-042c-485a-ab6c-912dcd377751" containerName="nbdb" Jan 27 08:03:27 crc kubenswrapper[4787]: I0127 08:03:27.789278 4787 memory_manager.go:354] "RemoveStaleState removing state" podUID="fa44405c-042c-485a-ab6c-912dcd377751" containerName="ovnkube-controller" Jan 27 08:03:27 crc kubenswrapper[4787]: I0127 08:03:27.789287 4787 memory_manager.go:354] "RemoveStaleState removing state" podUID="fa44405c-042c-485a-ab6c-912dcd377751" containerName="ovn-acl-logging" Jan 27 08:03:27 crc kubenswrapper[4787]: I0127 08:03:27.789298 4787 memory_manager.go:354] "RemoveStaleState removing state" podUID="fa44405c-042c-485a-ab6c-912dcd377751" containerName="ovn-controller" Jan 27 08:03:27 crc kubenswrapper[4787]: I0127 08:03:27.789310 4787 memory_manager.go:354] "RemoveStaleState removing state" podUID="fa44405c-042c-485a-ab6c-912dcd377751" containerName="kube-rbac-proxy-node" Jan 27 08:03:27 crc kubenswrapper[4787]: I0127 08:03:27.789318 4787 memory_manager.go:354] "RemoveStaleState removing state" podUID="fa44405c-042c-485a-ab6c-912dcd377751" containerName="kube-rbac-proxy-ovn-metrics" Jan 27 08:03:27 crc kubenswrapper[4787]: I0127 08:03:27.789326 4787 memory_manager.go:354] "RemoveStaleState removing state" podUID="fa44405c-042c-485a-ab6c-912dcd377751" 
containerName="ovnkube-controller" Jan 27 08:03:27 crc kubenswrapper[4787]: I0127 08:03:27.789332 4787 memory_manager.go:354] "RemoveStaleState removing state" podUID="fa44405c-042c-485a-ab6c-912dcd377751" containerName="sbdb" Jan 27 08:03:27 crc kubenswrapper[4787]: I0127 08:03:27.789340 4787 memory_manager.go:354] "RemoveStaleState removing state" podUID="fa44405c-042c-485a-ab6c-912dcd377751" containerName="ovnkube-controller" Jan 27 08:03:27 crc kubenswrapper[4787]: I0127 08:03:27.789346 4787 memory_manager.go:354] "RemoveStaleState removing state" podUID="fa44405c-042c-485a-ab6c-912dcd377751" containerName="ovnkube-controller" Jan 27 08:03:27 crc kubenswrapper[4787]: I0127 08:03:27.789353 4787 memory_manager.go:354] "RemoveStaleState removing state" podUID="fa44405c-042c-485a-ab6c-912dcd377751" containerName="northd" Jan 27 08:03:27 crc kubenswrapper[4787]: E0127 08:03:27.789452 4787 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fa44405c-042c-485a-ab6c-912dcd377751" containerName="ovnkube-controller" Jan 27 08:03:27 crc kubenswrapper[4787]: I0127 08:03:27.789459 4787 state_mem.go:107] "Deleted CPUSet assignment" podUID="fa44405c-042c-485a-ab6c-912dcd377751" containerName="ovnkube-controller" Jan 27 08:03:27 crc kubenswrapper[4787]: I0127 08:03:27.789568 4787 memory_manager.go:354] "RemoveStaleState removing state" podUID="fa44405c-042c-485a-ab6c-912dcd377751" containerName="ovnkube-controller" Jan 27 08:03:27 crc kubenswrapper[4787]: I0127 08:03:27.791120 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-x2w6d" Jan 27 08:03:27 crc kubenswrapper[4787]: I0127 08:03:27.858207 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/fa44405c-042c-485a-ab6c-912dcd377751-run-openvswitch\") pod \"fa44405c-042c-485a-ab6c-912dcd377751\" (UID: \"fa44405c-042c-485a-ab6c-912dcd377751\") " Jan 27 08:03:27 crc kubenswrapper[4787]: I0127 08:03:27.858250 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/fa44405c-042c-485a-ab6c-912dcd377751-log-socket\") pod \"fa44405c-042c-485a-ab6c-912dcd377751\" (UID: \"fa44405c-042c-485a-ab6c-912dcd377751\") " Jan 27 08:03:27 crc kubenswrapper[4787]: I0127 08:03:27.858307 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/fa44405c-042c-485a-ab6c-912dcd377751-host-run-netns\") pod \"fa44405c-042c-485a-ab6c-912dcd377751\" (UID: \"fa44405c-042c-485a-ab6c-912dcd377751\") " Jan 27 08:03:27 crc kubenswrapper[4787]: I0127 08:03:27.858336 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/fa44405c-042c-485a-ab6c-912dcd377751-var-lib-openvswitch\") pod \"fa44405c-042c-485a-ab6c-912dcd377751\" (UID: \"fa44405c-042c-485a-ab6c-912dcd377751\") " Jan 27 08:03:27 crc kubenswrapper[4787]: I0127 08:03:27.858361 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/fa44405c-042c-485a-ab6c-912dcd377751-run-openvswitch" (OuterVolumeSpecName: "run-openvswitch") pod "fa44405c-042c-485a-ab6c-912dcd377751" (UID: "fa44405c-042c-485a-ab6c-912dcd377751"). InnerVolumeSpecName "run-openvswitch". 
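
[Editor's note] A few entries back the kubelet tears down the ovnkube-node-7642m containers with "Killing container with a grace period" and gracePeriod=30 (the earlier machine-config-daemon restarts used gracePeriod=600). Semantically this is terminate, wait, then force-kill: the runtime delivers SIGTERM (or the image's configured stop signal), waits up to the grace period for the process to exit, and only then sends SIGKILL. Below is a minimal stand-alone Go sketch of that pattern; the 5-second grace period and the `sleep 300` stand-in workload are arbitrary choices for the example (it assumes a POSIX system with a `sleep` binary) and none of it is CRI-O or kubelet code.

```go
// Sketch of "kill with a grace period": SIGTERM first, then SIGKILL if the
// process is still running when the grace period expires.
package main

import (
	"fmt"
	"os/exec"
	"syscall"
	"time"
)

func killWithGracePeriod(cmd *exec.Cmd, grace time.Duration) {
	done := make(chan error, 1)
	go func() { done <- cmd.Wait() }()

	// Polite termination request, analogous to the runtime stopping a container.
	_ = cmd.Process.Signal(syscall.SIGTERM)

	select {
	case err := <-done:
		fmt.Println("exited within grace period:", err)
	case <-time.After(grace):
		// Grace period elapsed: force-kill and collect the exit status.
		_ = cmd.Process.Kill()
		fmt.Println("grace period elapsed, sent SIGKILL:", <-done)
	}
}

func main() {
	// sleep exits promptly on SIGTERM, so the force-kill branch is rarely taken here.
	cmd := exec.Command("sleep", "300")
	if err := cmd.Start(); err != nil {
		panic(err)
	}
	killWithGracePeriod(cmd, 5*time.Second)
}
```
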
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 27 08:03:27 crc kubenswrapper[4787]: I0127 08:03:27.858400 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/fa44405c-042c-485a-ab6c-912dcd377751-ovnkube-config\") pod \"fa44405c-042c-485a-ab6c-912dcd377751\" (UID: \"fa44405c-042c-485a-ab6c-912dcd377751\") " Jan 27 08:03:27 crc kubenswrapper[4787]: I0127 08:03:27.858428 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/fa44405c-042c-485a-ab6c-912dcd377751-host-run-ovn-kubernetes\") pod \"fa44405c-042c-485a-ab6c-912dcd377751\" (UID: \"fa44405c-042c-485a-ab6c-912dcd377751\") " Jan 27 08:03:27 crc kubenswrapper[4787]: I0127 08:03:27.858462 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/fa44405c-042c-485a-ab6c-912dcd377751-host-kubelet\") pod \"fa44405c-042c-485a-ab6c-912dcd377751\" (UID: \"fa44405c-042c-485a-ab6c-912dcd377751\") " Jan 27 08:03:27 crc kubenswrapper[4787]: I0127 08:03:27.858488 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/fa44405c-042c-485a-ab6c-912dcd377751-run-systemd\") pod \"fa44405c-042c-485a-ab6c-912dcd377751\" (UID: \"fa44405c-042c-485a-ab6c-912dcd377751\") " Jan 27 08:03:27 crc kubenswrapper[4787]: I0127 08:03:27.858457 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/fa44405c-042c-485a-ab6c-912dcd377751-var-lib-openvswitch" (OuterVolumeSpecName: "var-lib-openvswitch") pod "fa44405c-042c-485a-ab6c-912dcd377751" (UID: "fa44405c-042c-485a-ab6c-912dcd377751"). InnerVolumeSpecName "var-lib-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 27 08:03:27 crc kubenswrapper[4787]: I0127 08:03:27.858486 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/fa44405c-042c-485a-ab6c-912dcd377751-log-socket" (OuterVolumeSpecName: "log-socket") pod "fa44405c-042c-485a-ab6c-912dcd377751" (UID: "fa44405c-042c-485a-ab6c-912dcd377751"). InnerVolumeSpecName "log-socket". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 27 08:03:27 crc kubenswrapper[4787]: I0127 08:03:27.858534 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/fa44405c-042c-485a-ab6c-912dcd377751-host-run-ovn-kubernetes" (OuterVolumeSpecName: "host-run-ovn-kubernetes") pod "fa44405c-042c-485a-ab6c-912dcd377751" (UID: "fa44405c-042c-485a-ab6c-912dcd377751"). InnerVolumeSpecName "host-run-ovn-kubernetes". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 27 08:03:27 crc kubenswrapper[4787]: I0127 08:03:27.858516 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tj4tn\" (UniqueName: \"kubernetes.io/projected/fa44405c-042c-485a-ab6c-912dcd377751-kube-api-access-tj4tn\") pod \"fa44405c-042c-485a-ab6c-912dcd377751\" (UID: \"fa44405c-042c-485a-ab6c-912dcd377751\") " Jan 27 08:03:27 crc kubenswrapper[4787]: I0127 08:03:27.858473 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/fa44405c-042c-485a-ab6c-912dcd377751-host-run-netns" (OuterVolumeSpecName: "host-run-netns") pod "fa44405c-042c-485a-ab6c-912dcd377751" (UID: "fa44405c-042c-485a-ab6c-912dcd377751"). 
InnerVolumeSpecName "host-run-netns". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 27 08:03:27 crc kubenswrapper[4787]: I0127 08:03:27.858512 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/fa44405c-042c-485a-ab6c-912dcd377751-host-kubelet" (OuterVolumeSpecName: "host-kubelet") pod "fa44405c-042c-485a-ab6c-912dcd377751" (UID: "fa44405c-042c-485a-ab6c-912dcd377751"). InnerVolumeSpecName "host-kubelet". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 27 08:03:27 crc kubenswrapper[4787]: I0127 08:03:27.858704 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/fa44405c-042c-485a-ab6c-912dcd377751-node-log\") pod \"fa44405c-042c-485a-ab6c-912dcd377751\" (UID: \"fa44405c-042c-485a-ab6c-912dcd377751\") " Jan 27 08:03:27 crc kubenswrapper[4787]: I0127 08:03:27.858781 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/fa44405c-042c-485a-ab6c-912dcd377751-run-ovn\") pod \"fa44405c-042c-485a-ab6c-912dcd377751\" (UID: \"fa44405c-042c-485a-ab6c-912dcd377751\") " Jan 27 08:03:27 crc kubenswrapper[4787]: I0127 08:03:27.858831 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/fa44405c-042c-485a-ab6c-912dcd377751-host-slash\") pod \"fa44405c-042c-485a-ab6c-912dcd377751\" (UID: \"fa44405c-042c-485a-ab6c-912dcd377751\") " Jan 27 08:03:27 crc kubenswrapper[4787]: I0127 08:03:27.858855 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/fa44405c-042c-485a-ab6c-912dcd377751-host-cni-bin\") pod \"fa44405c-042c-485a-ab6c-912dcd377751\" (UID: \"fa44405c-042c-485a-ab6c-912dcd377751\") " Jan 27 08:03:27 crc kubenswrapper[4787]: I0127 08:03:27.858873 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/fa44405c-042c-485a-ab6c-912dcd377751-host-cni-netd\") pod \"fa44405c-042c-485a-ab6c-912dcd377751\" (UID: \"fa44405c-042c-485a-ab6c-912dcd377751\") " Jan 27 08:03:27 crc kubenswrapper[4787]: I0127 08:03:27.858942 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/fa44405c-042c-485a-ab6c-912dcd377751-ovn-node-metrics-cert\") pod \"fa44405c-042c-485a-ab6c-912dcd377751\" (UID: \"fa44405c-042c-485a-ab6c-912dcd377751\") " Jan 27 08:03:27 crc kubenswrapper[4787]: I0127 08:03:27.858993 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/fa44405c-042c-485a-ab6c-912dcd377751-etc-openvswitch\") pod \"fa44405c-042c-485a-ab6c-912dcd377751\" (UID: \"fa44405c-042c-485a-ab6c-912dcd377751\") " Jan 27 08:03:27 crc kubenswrapper[4787]: I0127 08:03:27.859022 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/fa44405c-042c-485a-ab6c-912dcd377751-systemd-units\") pod \"fa44405c-042c-485a-ab6c-912dcd377751\" (UID: \"fa44405c-042c-485a-ab6c-912dcd377751\") " Jan 27 08:03:27 crc kubenswrapper[4787]: I0127 08:03:27.859068 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: 
\"kubernetes.io/configmap/fa44405c-042c-485a-ab6c-912dcd377751-ovnkube-script-lib\") pod \"fa44405c-042c-485a-ab6c-912dcd377751\" (UID: \"fa44405c-042c-485a-ab6c-912dcd377751\") " Jan 27 08:03:27 crc kubenswrapper[4787]: I0127 08:03:27.859095 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/fa44405c-042c-485a-ab6c-912dcd377751-host-var-lib-cni-networks-ovn-kubernetes\") pod \"fa44405c-042c-485a-ab6c-912dcd377751\" (UID: \"fa44405c-042c-485a-ab6c-912dcd377751\") " Jan 27 08:03:27 crc kubenswrapper[4787]: I0127 08:03:27.859120 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/fa44405c-042c-485a-ab6c-912dcd377751-env-overrides\") pod \"fa44405c-042c-485a-ab6c-912dcd377751\" (UID: \"fa44405c-042c-485a-ab6c-912dcd377751\") " Jan 27 08:03:27 crc kubenswrapper[4787]: I0127 08:03:27.859116 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/fa44405c-042c-485a-ab6c-912dcd377751-host-cni-bin" (OuterVolumeSpecName: "host-cni-bin") pod "fa44405c-042c-485a-ab6c-912dcd377751" (UID: "fa44405c-042c-485a-ab6c-912dcd377751"). InnerVolumeSpecName "host-cni-bin". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 27 08:03:27 crc kubenswrapper[4787]: I0127 08:03:27.859134 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/fa44405c-042c-485a-ab6c-912dcd377751-etc-openvswitch" (OuterVolumeSpecName: "etc-openvswitch") pod "fa44405c-042c-485a-ab6c-912dcd377751" (UID: "fa44405c-042c-485a-ab6c-912dcd377751"). InnerVolumeSpecName "etc-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 27 08:03:27 crc kubenswrapper[4787]: I0127 08:03:27.859168 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/fa44405c-042c-485a-ab6c-912dcd377751-host-cni-netd" (OuterVolumeSpecName: "host-cni-netd") pod "fa44405c-042c-485a-ab6c-912dcd377751" (UID: "fa44405c-042c-485a-ab6c-912dcd377751"). InnerVolumeSpecName "host-cni-netd". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 27 08:03:27 crc kubenswrapper[4787]: I0127 08:03:27.859177 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/fa44405c-042c-485a-ab6c-912dcd377751-node-log" (OuterVolumeSpecName: "node-log") pod "fa44405c-042c-485a-ab6c-912dcd377751" (UID: "fa44405c-042c-485a-ab6c-912dcd377751"). InnerVolumeSpecName "node-log". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 27 08:03:27 crc kubenswrapper[4787]: I0127 08:03:27.859201 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/fa44405c-042c-485a-ab6c-912dcd377751-run-ovn" (OuterVolumeSpecName: "run-ovn") pod "fa44405c-042c-485a-ab6c-912dcd377751" (UID: "fa44405c-042c-485a-ab6c-912dcd377751"). InnerVolumeSpecName "run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 27 08:03:27 crc kubenswrapper[4787]: I0127 08:03:27.859206 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fa44405c-042c-485a-ab6c-912dcd377751-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "fa44405c-042c-485a-ab6c-912dcd377751" (UID: "fa44405c-042c-485a-ab6c-912dcd377751"). InnerVolumeSpecName "ovnkube-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 27 08:03:27 crc kubenswrapper[4787]: I0127 08:03:27.859225 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/fa44405c-042c-485a-ab6c-912dcd377751-host-slash" (OuterVolumeSpecName: "host-slash") pod "fa44405c-042c-485a-ab6c-912dcd377751" (UID: "fa44405c-042c-485a-ab6c-912dcd377751"). InnerVolumeSpecName "host-slash". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 27 08:03:27 crc kubenswrapper[4787]: I0127 08:03:27.859254 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/fa44405c-042c-485a-ab6c-912dcd377751-host-var-lib-cni-networks-ovn-kubernetes" (OuterVolumeSpecName: "host-var-lib-cni-networks-ovn-kubernetes") pod "fa44405c-042c-485a-ab6c-912dcd377751" (UID: "fa44405c-042c-485a-ab6c-912dcd377751"). InnerVolumeSpecName "host-var-lib-cni-networks-ovn-kubernetes". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 27 08:03:27 crc kubenswrapper[4787]: I0127 08:03:27.859275 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/fa44405c-042c-485a-ab6c-912dcd377751-systemd-units" (OuterVolumeSpecName: "systemd-units") pod "fa44405c-042c-485a-ab6c-912dcd377751" (UID: "fa44405c-042c-485a-ab6c-912dcd377751"). InnerVolumeSpecName "systemd-units". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 27 08:03:27 crc kubenswrapper[4787]: I0127 08:03:27.859568 4787 reconciler_common.go:293] "Volume detached for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/fa44405c-042c-485a-ab6c-912dcd377751-etc-openvswitch\") on node \"crc\" DevicePath \"\"" Jan 27 08:03:27 crc kubenswrapper[4787]: I0127 08:03:27.859582 4787 reconciler_common.go:293] "Volume detached for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/fa44405c-042c-485a-ab6c-912dcd377751-systemd-units\") on node \"crc\" DevicePath \"\"" Jan 27 08:03:27 crc kubenswrapper[4787]: I0127 08:03:27.859591 4787 reconciler_common.go:293] "Volume detached for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/fa44405c-042c-485a-ab6c-912dcd377751-host-var-lib-cni-networks-ovn-kubernetes\") on node \"crc\" DevicePath \"\"" Jan 27 08:03:27 crc kubenswrapper[4787]: I0127 08:03:27.859600 4787 reconciler_common.go:293] "Volume detached for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/fa44405c-042c-485a-ab6c-912dcd377751-run-openvswitch\") on node \"crc\" DevicePath \"\"" Jan 27 08:03:27 crc kubenswrapper[4787]: I0127 08:03:27.859610 4787 reconciler_common.go:293] "Volume detached for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/fa44405c-042c-485a-ab6c-912dcd377751-log-socket\") on node \"crc\" DevicePath \"\"" Jan 27 08:03:27 crc kubenswrapper[4787]: I0127 08:03:27.859621 4787 reconciler_common.go:293] "Volume detached for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/fa44405c-042c-485a-ab6c-912dcd377751-host-run-netns\") on node \"crc\" DevicePath \"\"" Jan 27 08:03:27 crc kubenswrapper[4787]: I0127 08:03:27.859629 4787 reconciler_common.go:293] "Volume detached for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/fa44405c-042c-485a-ab6c-912dcd377751-var-lib-openvswitch\") on node \"crc\" DevicePath \"\"" Jan 27 08:03:27 crc kubenswrapper[4787]: I0127 08:03:27.859638 4787 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: 
\"kubernetes.io/configmap/fa44405c-042c-485a-ab6c-912dcd377751-ovnkube-config\") on node \"crc\" DevicePath \"\"" Jan 27 08:03:27 crc kubenswrapper[4787]: I0127 08:03:27.859646 4787 reconciler_common.go:293] "Volume detached for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/fa44405c-042c-485a-ab6c-912dcd377751-host-run-ovn-kubernetes\") on node \"crc\" DevicePath \"\"" Jan 27 08:03:27 crc kubenswrapper[4787]: I0127 08:03:27.859641 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fa44405c-042c-485a-ab6c-912dcd377751-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "fa44405c-042c-485a-ab6c-912dcd377751" (UID: "fa44405c-042c-485a-ab6c-912dcd377751"). InnerVolumeSpecName "ovnkube-script-lib". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 27 08:03:27 crc kubenswrapper[4787]: I0127 08:03:27.859663 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fa44405c-042c-485a-ab6c-912dcd377751-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "fa44405c-042c-485a-ab6c-912dcd377751" (UID: "fa44405c-042c-485a-ab6c-912dcd377751"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 27 08:03:27 crc kubenswrapper[4787]: I0127 08:03:27.859655 4787 reconciler_common.go:293] "Volume detached for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/fa44405c-042c-485a-ab6c-912dcd377751-host-kubelet\") on node \"crc\" DevicePath \"\"" Jan 27 08:03:27 crc kubenswrapper[4787]: I0127 08:03:27.859723 4787 reconciler_common.go:293] "Volume detached for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/fa44405c-042c-485a-ab6c-912dcd377751-node-log\") on node \"crc\" DevicePath \"\"" Jan 27 08:03:27 crc kubenswrapper[4787]: I0127 08:03:27.859750 4787 reconciler_common.go:293] "Volume detached for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/fa44405c-042c-485a-ab6c-912dcd377751-run-ovn\") on node \"crc\" DevicePath \"\"" Jan 27 08:03:27 crc kubenswrapper[4787]: I0127 08:03:27.859768 4787 reconciler_common.go:293] "Volume detached for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/fa44405c-042c-485a-ab6c-912dcd377751-host-slash\") on node \"crc\" DevicePath \"\"" Jan 27 08:03:27 crc kubenswrapper[4787]: I0127 08:03:27.859781 4787 reconciler_common.go:293] "Volume detached for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/fa44405c-042c-485a-ab6c-912dcd377751-host-cni-bin\") on node \"crc\" DevicePath \"\"" Jan 27 08:03:27 crc kubenswrapper[4787]: I0127 08:03:27.859794 4787 reconciler_common.go:293] "Volume detached for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/fa44405c-042c-485a-ab6c-912dcd377751-host-cni-netd\") on node \"crc\" DevicePath \"\"" Jan 27 08:03:27 crc kubenswrapper[4787]: I0127 08:03:27.864409 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-operator-646758c888-zxwfx" event={"ID":"d9fe5568-1516-4ee4-8ca1-90f4ea58ba3e","Type":"ContainerStarted","Data":"3ef4563109c5b497ef7316fdd9db2d3de3a355d5acbde2c8650dd5a12d20907f"} Jan 27 08:03:27 crc kubenswrapper[4787]: I0127 08:03:27.865159 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fa44405c-042c-485a-ab6c-912dcd377751-kube-api-access-tj4tn" (OuterVolumeSpecName: "kube-api-access-tj4tn") pod "fa44405c-042c-485a-ab6c-912dcd377751" (UID: "fa44405c-042c-485a-ab6c-912dcd377751"). 
InnerVolumeSpecName "kube-api-access-tj4tn". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 27 08:03:27 crc kubenswrapper[4787]: I0127 08:03:27.867198 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fa44405c-042c-485a-ab6c-912dcd377751-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "fa44405c-042c-485a-ab6c-912dcd377751" (UID: "fa44405c-042c-485a-ab6c-912dcd377751"). InnerVolumeSpecName "ovn-node-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 27 08:03:27 crc kubenswrapper[4787]: I0127 08:03:27.867694 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-rqjpz_e6f78168-0b0d-464d-b1c7-00bb9a69c0d1/kube-multus/2.log" Jan 27 08:03:27 crc kubenswrapper[4787]: I0127 08:03:27.868231 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-rqjpz_e6f78168-0b0d-464d-b1c7-00bb9a69c0d1/kube-multus/1.log" Jan 27 08:03:27 crc kubenswrapper[4787]: I0127 08:03:27.868290 4787 generic.go:334] "Generic (PLEG): container finished" podID="e6f78168-0b0d-464d-b1c7-00bb9a69c0d1" containerID="30eb607bd3c5a74648f4c24cbbf8118159296bb63fb30a76b991d8fdb94cb16a" exitCode=2 Jan 27 08:03:27 crc kubenswrapper[4787]: I0127 08:03:27.868376 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-rqjpz" event={"ID":"e6f78168-0b0d-464d-b1c7-00bb9a69c0d1","Type":"ContainerDied","Data":"30eb607bd3c5a74648f4c24cbbf8118159296bb63fb30a76b991d8fdb94cb16a"} Jan 27 08:03:27 crc kubenswrapper[4787]: I0127 08:03:27.868443 4787 scope.go:117] "RemoveContainer" containerID="4924d9f32dbc90c38c70d18db5a32bdabb76d288c34457331804829e451cf169" Jan 27 08:03:27 crc kubenswrapper[4787]: I0127 08:03:27.868914 4787 scope.go:117] "RemoveContainer" containerID="30eb607bd3c5a74648f4c24cbbf8118159296bb63fb30a76b991d8fdb94cb16a" Jan 27 08:03:27 crc kubenswrapper[4787]: E0127 08:03:27.869095 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 20s restarting failed container=kube-multus pod=multus-rqjpz_openshift-multus(e6f78168-0b0d-464d-b1c7-00bb9a69c0d1)\"" pod="openshift-multus/multus-rqjpz" podUID="e6f78168-0b0d-464d-b1c7-00bb9a69c0d1" Jan 27 08:03:27 crc kubenswrapper[4787]: I0127 08:03:27.873724 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/fa44405c-042c-485a-ab6c-912dcd377751-run-systemd" (OuterVolumeSpecName: "run-systemd") pod "fa44405c-042c-485a-ab6c-912dcd377751" (UID: "fa44405c-042c-485a-ab6c-912dcd377751"). InnerVolumeSpecName "run-systemd". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 27 08:03:27 crc kubenswrapper[4787]: I0127 08:03:27.874495 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-7642m_fa44405c-042c-485a-ab6c-912dcd377751/ovnkube-controller/3.log" Jan 27 08:03:27 crc kubenswrapper[4787]: I0127 08:03:27.878926 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-7642m_fa44405c-042c-485a-ab6c-912dcd377751/ovn-acl-logging/0.log" Jan 27 08:03:27 crc kubenswrapper[4787]: I0127 08:03:27.880072 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-7642m_fa44405c-042c-485a-ab6c-912dcd377751/ovn-controller/0.log" Jan 27 08:03:27 crc kubenswrapper[4787]: I0127 08:03:27.880827 4787 generic.go:334] "Generic (PLEG): container finished" podID="fa44405c-042c-485a-ab6c-912dcd377751" containerID="be3770d2a2e5b599b3bac6445f4cdedb5531335a40c7e1a5727aa8b323e2394c" exitCode=0 Jan 27 08:03:27 crc kubenswrapper[4787]: I0127 08:03:27.880854 4787 generic.go:334] "Generic (PLEG): container finished" podID="fa44405c-042c-485a-ab6c-912dcd377751" containerID="d46b5c512f7b14d97a53ce0355a7ceb08370d3e91ccef003b40386a9785df413" exitCode=0 Jan 27 08:03:27 crc kubenswrapper[4787]: I0127 08:03:27.880861 4787 generic.go:334] "Generic (PLEG): container finished" podID="fa44405c-042c-485a-ab6c-912dcd377751" containerID="108cdc389c2cbb9baeb538856e70ea1ca2bce5968eb4097700b3e71e5322258d" exitCode=0 Jan 27 08:03:27 crc kubenswrapper[4787]: I0127 08:03:27.880869 4787 generic.go:334] "Generic (PLEG): container finished" podID="fa44405c-042c-485a-ab6c-912dcd377751" containerID="0bcc1a1d9f6e5bd0d8cf9b36441460debd839f92291531175eb05db5070136e8" exitCode=0 Jan 27 08:03:27 crc kubenswrapper[4787]: I0127 08:03:27.880876 4787 generic.go:334] "Generic (PLEG): container finished" podID="fa44405c-042c-485a-ab6c-912dcd377751" containerID="2abb29a77d0081b70495701e68f0a5a9b80656eab59ae8de10fab1450a1df49f" exitCode=0 Jan 27 08:03:27 crc kubenswrapper[4787]: I0127 08:03:27.880883 4787 generic.go:334] "Generic (PLEG): container finished" podID="fa44405c-042c-485a-ab6c-912dcd377751" containerID="b5f54aa358ddf7d9fe76fd45c4e3332ba2ef1fa4da77c6f38ae29209c4339a80" exitCode=0 Jan 27 08:03:27 crc kubenswrapper[4787]: I0127 08:03:27.880901 4787 generic.go:334] "Generic (PLEG): container finished" podID="fa44405c-042c-485a-ab6c-912dcd377751" containerID="fbc588f712cb0167029fc80f924f52841fb904738bbcf774c53ca15a2642ef9c" exitCode=143 Jan 27 08:03:27 crc kubenswrapper[4787]: I0127 08:03:27.880909 4787 generic.go:334] "Generic (PLEG): container finished" podID="fa44405c-042c-485a-ab6c-912dcd377751" containerID="fc1a27dd4120a4eafd6bfbd908f9fdbc80e9b006afafb509399f2b657b129b2c" exitCode=143 Jan 27 08:03:27 crc kubenswrapper[4787]: I0127 08:03:27.880868 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7642m" event={"ID":"fa44405c-042c-485a-ab6c-912dcd377751","Type":"ContainerDied","Data":"be3770d2a2e5b599b3bac6445f4cdedb5531335a40c7e1a5727aa8b323e2394c"} Jan 27 08:03:27 crc kubenswrapper[4787]: I0127 08:03:27.880948 4787 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-7642m" Jan 27 08:03:27 crc kubenswrapper[4787]: I0127 08:03:27.880957 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7642m" event={"ID":"fa44405c-042c-485a-ab6c-912dcd377751","Type":"ContainerDied","Data":"d46b5c512f7b14d97a53ce0355a7ceb08370d3e91ccef003b40386a9785df413"} Jan 27 08:03:27 crc kubenswrapper[4787]: I0127 08:03:27.880977 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7642m" event={"ID":"fa44405c-042c-485a-ab6c-912dcd377751","Type":"ContainerDied","Data":"108cdc389c2cbb9baeb538856e70ea1ca2bce5968eb4097700b3e71e5322258d"} Jan 27 08:03:27 crc kubenswrapper[4787]: I0127 08:03:27.880988 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7642m" event={"ID":"fa44405c-042c-485a-ab6c-912dcd377751","Type":"ContainerDied","Data":"0bcc1a1d9f6e5bd0d8cf9b36441460debd839f92291531175eb05db5070136e8"} Jan 27 08:03:27 crc kubenswrapper[4787]: I0127 08:03:27.880999 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7642m" event={"ID":"fa44405c-042c-485a-ab6c-912dcd377751","Type":"ContainerDied","Data":"2abb29a77d0081b70495701e68f0a5a9b80656eab59ae8de10fab1450a1df49f"} Jan 27 08:03:27 crc kubenswrapper[4787]: I0127 08:03:27.881008 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7642m" event={"ID":"fa44405c-042c-485a-ab6c-912dcd377751","Type":"ContainerDied","Data":"b5f54aa358ddf7d9fe76fd45c4e3332ba2ef1fa4da77c6f38ae29209c4339a80"} Jan 27 08:03:27 crc kubenswrapper[4787]: I0127 08:03:27.881021 4787 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"be3770d2a2e5b599b3bac6445f4cdedb5531335a40c7e1a5727aa8b323e2394c"} Jan 27 08:03:27 crc kubenswrapper[4787]: I0127 08:03:27.881033 4787 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"6d3ec81938807879c988aabe13c7bf18add843b4295e6a4228405f02b47fd637"} Jan 27 08:03:27 crc kubenswrapper[4787]: I0127 08:03:27.881039 4787 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"d46b5c512f7b14d97a53ce0355a7ceb08370d3e91ccef003b40386a9785df413"} Jan 27 08:03:27 crc kubenswrapper[4787]: I0127 08:03:27.881044 4787 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"108cdc389c2cbb9baeb538856e70ea1ca2bce5968eb4097700b3e71e5322258d"} Jan 27 08:03:27 crc kubenswrapper[4787]: I0127 08:03:27.881049 4787 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"0bcc1a1d9f6e5bd0d8cf9b36441460debd839f92291531175eb05db5070136e8"} Jan 27 08:03:27 crc kubenswrapper[4787]: I0127 08:03:27.881056 4787 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"2abb29a77d0081b70495701e68f0a5a9b80656eab59ae8de10fab1450a1df49f"} Jan 27 08:03:27 crc kubenswrapper[4787]: I0127 08:03:27.881061 4787 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"b5f54aa358ddf7d9fe76fd45c4e3332ba2ef1fa4da77c6f38ae29209c4339a80"} Jan 27 08:03:27 crc kubenswrapper[4787]: I0127 08:03:27.881066 4787 pod_container_deletor.go:114] "Failed to issue the request to remove container" 
containerID={"Type":"cri-o","ID":"fbc588f712cb0167029fc80f924f52841fb904738bbcf774c53ca15a2642ef9c"} Jan 27 08:03:27 crc kubenswrapper[4787]: I0127 08:03:27.881072 4787 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"fc1a27dd4120a4eafd6bfbd908f9fdbc80e9b006afafb509399f2b657b129b2c"} Jan 27 08:03:27 crc kubenswrapper[4787]: I0127 08:03:27.881077 4787 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"199ad62dee1b7ba9f8af9b2f3fcd77db94e8ba7dbfe2d011610c47358c17126b"} Jan 27 08:03:27 crc kubenswrapper[4787]: I0127 08:03:27.881084 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7642m" event={"ID":"fa44405c-042c-485a-ab6c-912dcd377751","Type":"ContainerDied","Data":"fbc588f712cb0167029fc80f924f52841fb904738bbcf774c53ca15a2642ef9c"} Jan 27 08:03:27 crc kubenswrapper[4787]: I0127 08:03:27.881092 4787 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"be3770d2a2e5b599b3bac6445f4cdedb5531335a40c7e1a5727aa8b323e2394c"} Jan 27 08:03:27 crc kubenswrapper[4787]: I0127 08:03:27.881098 4787 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"6d3ec81938807879c988aabe13c7bf18add843b4295e6a4228405f02b47fd637"} Jan 27 08:03:27 crc kubenswrapper[4787]: I0127 08:03:27.881103 4787 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"d46b5c512f7b14d97a53ce0355a7ceb08370d3e91ccef003b40386a9785df413"} Jan 27 08:03:27 crc kubenswrapper[4787]: I0127 08:03:27.881108 4787 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"108cdc389c2cbb9baeb538856e70ea1ca2bce5968eb4097700b3e71e5322258d"} Jan 27 08:03:27 crc kubenswrapper[4787]: I0127 08:03:27.881113 4787 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"0bcc1a1d9f6e5bd0d8cf9b36441460debd839f92291531175eb05db5070136e8"} Jan 27 08:03:27 crc kubenswrapper[4787]: I0127 08:03:27.881118 4787 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"2abb29a77d0081b70495701e68f0a5a9b80656eab59ae8de10fab1450a1df49f"} Jan 27 08:03:27 crc kubenswrapper[4787]: I0127 08:03:27.881126 4787 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"b5f54aa358ddf7d9fe76fd45c4e3332ba2ef1fa4da77c6f38ae29209c4339a80"} Jan 27 08:03:27 crc kubenswrapper[4787]: I0127 08:03:27.881132 4787 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"fbc588f712cb0167029fc80f924f52841fb904738bbcf774c53ca15a2642ef9c"} Jan 27 08:03:27 crc kubenswrapper[4787]: I0127 08:03:27.881137 4787 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"fc1a27dd4120a4eafd6bfbd908f9fdbc80e9b006afafb509399f2b657b129b2c"} Jan 27 08:03:27 crc kubenswrapper[4787]: I0127 08:03:27.881142 4787 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"199ad62dee1b7ba9f8af9b2f3fcd77db94e8ba7dbfe2d011610c47358c17126b"} Jan 27 08:03:27 crc kubenswrapper[4787]: I0127 08:03:27.881149 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-ovn-kubernetes/ovnkube-node-7642m" event={"ID":"fa44405c-042c-485a-ab6c-912dcd377751","Type":"ContainerDied","Data":"fc1a27dd4120a4eafd6bfbd908f9fdbc80e9b006afafb509399f2b657b129b2c"} Jan 27 08:03:27 crc kubenswrapper[4787]: I0127 08:03:27.881157 4787 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"be3770d2a2e5b599b3bac6445f4cdedb5531335a40c7e1a5727aa8b323e2394c"} Jan 27 08:03:27 crc kubenswrapper[4787]: I0127 08:03:27.881163 4787 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"6d3ec81938807879c988aabe13c7bf18add843b4295e6a4228405f02b47fd637"} Jan 27 08:03:27 crc kubenswrapper[4787]: I0127 08:03:27.881169 4787 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"d46b5c512f7b14d97a53ce0355a7ceb08370d3e91ccef003b40386a9785df413"} Jan 27 08:03:27 crc kubenswrapper[4787]: I0127 08:03:27.881174 4787 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"108cdc389c2cbb9baeb538856e70ea1ca2bce5968eb4097700b3e71e5322258d"} Jan 27 08:03:27 crc kubenswrapper[4787]: I0127 08:03:27.881179 4787 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"0bcc1a1d9f6e5bd0d8cf9b36441460debd839f92291531175eb05db5070136e8"} Jan 27 08:03:27 crc kubenswrapper[4787]: I0127 08:03:27.881184 4787 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"2abb29a77d0081b70495701e68f0a5a9b80656eab59ae8de10fab1450a1df49f"} Jan 27 08:03:27 crc kubenswrapper[4787]: I0127 08:03:27.881189 4787 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"b5f54aa358ddf7d9fe76fd45c4e3332ba2ef1fa4da77c6f38ae29209c4339a80"} Jan 27 08:03:27 crc kubenswrapper[4787]: I0127 08:03:27.881194 4787 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"fbc588f712cb0167029fc80f924f52841fb904738bbcf774c53ca15a2642ef9c"} Jan 27 08:03:27 crc kubenswrapper[4787]: I0127 08:03:27.881199 4787 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"fc1a27dd4120a4eafd6bfbd908f9fdbc80e9b006afafb509399f2b657b129b2c"} Jan 27 08:03:27 crc kubenswrapper[4787]: I0127 08:03:27.881205 4787 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"199ad62dee1b7ba9f8af9b2f3fcd77db94e8ba7dbfe2d011610c47358c17126b"} Jan 27 08:03:27 crc kubenswrapper[4787]: I0127 08:03:27.881211 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7642m" event={"ID":"fa44405c-042c-485a-ab6c-912dcd377751","Type":"ContainerDied","Data":"40ba2420aaaf3b3cc8595bbc5c0c66623c4fd6910892664a858be15544316d35"} Jan 27 08:03:27 crc kubenswrapper[4787]: I0127 08:03:27.881219 4787 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"be3770d2a2e5b599b3bac6445f4cdedb5531335a40c7e1a5727aa8b323e2394c"} Jan 27 08:03:27 crc kubenswrapper[4787]: I0127 08:03:27.881225 4787 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"6d3ec81938807879c988aabe13c7bf18add843b4295e6a4228405f02b47fd637"} Jan 27 08:03:27 crc kubenswrapper[4787]: I0127 
08:03:27.881230 4787 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"d46b5c512f7b14d97a53ce0355a7ceb08370d3e91ccef003b40386a9785df413"} Jan 27 08:03:27 crc kubenswrapper[4787]: I0127 08:03:27.881235 4787 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"108cdc389c2cbb9baeb538856e70ea1ca2bce5968eb4097700b3e71e5322258d"} Jan 27 08:03:27 crc kubenswrapper[4787]: I0127 08:03:27.881240 4787 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"0bcc1a1d9f6e5bd0d8cf9b36441460debd839f92291531175eb05db5070136e8"} Jan 27 08:03:27 crc kubenswrapper[4787]: I0127 08:03:27.881246 4787 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"2abb29a77d0081b70495701e68f0a5a9b80656eab59ae8de10fab1450a1df49f"} Jan 27 08:03:27 crc kubenswrapper[4787]: I0127 08:03:27.881251 4787 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"b5f54aa358ddf7d9fe76fd45c4e3332ba2ef1fa4da77c6f38ae29209c4339a80"} Jan 27 08:03:27 crc kubenswrapper[4787]: I0127 08:03:27.881256 4787 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"fbc588f712cb0167029fc80f924f52841fb904738bbcf774c53ca15a2642ef9c"} Jan 27 08:03:27 crc kubenswrapper[4787]: I0127 08:03:27.881261 4787 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"fc1a27dd4120a4eafd6bfbd908f9fdbc80e9b006afafb509399f2b657b129b2c"} Jan 27 08:03:27 crc kubenswrapper[4787]: I0127 08:03:27.881266 4787 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"199ad62dee1b7ba9f8af9b2f3fcd77db94e8ba7dbfe2d011610c47358c17126b"} Jan 27 08:03:27 crc kubenswrapper[4787]: I0127 08:03:27.897399 4787 scope.go:117] "RemoveContainer" containerID="be3770d2a2e5b599b3bac6445f4cdedb5531335a40c7e1a5727aa8b323e2394c" Jan 27 08:03:27 crc kubenswrapper[4787]: I0127 08:03:27.924002 4787 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-7642m"] Jan 27 08:03:27 crc kubenswrapper[4787]: I0127 08:03:27.928766 4787 scope.go:117] "RemoveContainer" containerID="6d3ec81938807879c988aabe13c7bf18add843b4295e6a4228405f02b47fd637" Jan 27 08:03:27 crc kubenswrapper[4787]: I0127 08:03:27.932570 4787 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-7642m"] Jan 27 08:03:27 crc kubenswrapper[4787]: I0127 08:03:27.950204 4787 scope.go:117] "RemoveContainer" containerID="d46b5c512f7b14d97a53ce0355a7ceb08370d3e91ccef003b40386a9785df413" Jan 27 08:03:27 crc kubenswrapper[4787]: I0127 08:03:27.960859 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/053b65b3-b8e5-490f-81b8-63a5beea5f16-var-lib-openvswitch\") pod \"ovnkube-node-x2w6d\" (UID: \"053b65b3-b8e5-490f-81b8-63a5beea5f16\") " pod="openshift-ovn-kubernetes/ovnkube-node-x2w6d" Jan 27 08:03:27 crc kubenswrapper[4787]: I0127 08:03:27.960922 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/053b65b3-b8e5-490f-81b8-63a5beea5f16-ovnkube-script-lib\") pod \"ovnkube-node-x2w6d\" (UID: 
\"053b65b3-b8e5-490f-81b8-63a5beea5f16\") " pod="openshift-ovn-kubernetes/ovnkube-node-x2w6d" Jan 27 08:03:27 crc kubenswrapper[4787]: I0127 08:03:27.960957 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/053b65b3-b8e5-490f-81b8-63a5beea5f16-host-kubelet\") pod \"ovnkube-node-x2w6d\" (UID: \"053b65b3-b8e5-490f-81b8-63a5beea5f16\") " pod="openshift-ovn-kubernetes/ovnkube-node-x2w6d" Jan 27 08:03:27 crc kubenswrapper[4787]: I0127 08:03:27.961043 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/053b65b3-b8e5-490f-81b8-63a5beea5f16-etc-openvswitch\") pod \"ovnkube-node-x2w6d\" (UID: \"053b65b3-b8e5-490f-81b8-63a5beea5f16\") " pod="openshift-ovn-kubernetes/ovnkube-node-x2w6d" Jan 27 08:03:27 crc kubenswrapper[4787]: I0127 08:03:27.961123 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/053b65b3-b8e5-490f-81b8-63a5beea5f16-host-run-netns\") pod \"ovnkube-node-x2w6d\" (UID: \"053b65b3-b8e5-490f-81b8-63a5beea5f16\") " pod="openshift-ovn-kubernetes/ovnkube-node-x2w6d" Jan 27 08:03:27 crc kubenswrapper[4787]: I0127 08:03:27.961178 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/053b65b3-b8e5-490f-81b8-63a5beea5f16-systemd-units\") pod \"ovnkube-node-x2w6d\" (UID: \"053b65b3-b8e5-490f-81b8-63a5beea5f16\") " pod="openshift-ovn-kubernetes/ovnkube-node-x2w6d" Jan 27 08:03:27 crc kubenswrapper[4787]: I0127 08:03:27.961210 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4zrj4\" (UniqueName: \"kubernetes.io/projected/053b65b3-b8e5-490f-81b8-63a5beea5f16-kube-api-access-4zrj4\") pod \"ovnkube-node-x2w6d\" (UID: \"053b65b3-b8e5-490f-81b8-63a5beea5f16\") " pod="openshift-ovn-kubernetes/ovnkube-node-x2w6d" Jan 27 08:03:27 crc kubenswrapper[4787]: I0127 08:03:27.961231 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/053b65b3-b8e5-490f-81b8-63a5beea5f16-ovnkube-config\") pod \"ovnkube-node-x2w6d\" (UID: \"053b65b3-b8e5-490f-81b8-63a5beea5f16\") " pod="openshift-ovn-kubernetes/ovnkube-node-x2w6d" Jan 27 08:03:27 crc kubenswrapper[4787]: I0127 08:03:27.961251 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/053b65b3-b8e5-490f-81b8-63a5beea5f16-run-systemd\") pod \"ovnkube-node-x2w6d\" (UID: \"053b65b3-b8e5-490f-81b8-63a5beea5f16\") " pod="openshift-ovn-kubernetes/ovnkube-node-x2w6d" Jan 27 08:03:27 crc kubenswrapper[4787]: I0127 08:03:27.961273 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/053b65b3-b8e5-490f-81b8-63a5beea5f16-node-log\") pod \"ovnkube-node-x2w6d\" (UID: \"053b65b3-b8e5-490f-81b8-63a5beea5f16\") " pod="openshift-ovn-kubernetes/ovnkube-node-x2w6d" Jan 27 08:03:27 crc kubenswrapper[4787]: I0127 08:03:27.961295 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: 
\"kubernetes.io/host-path/053b65b3-b8e5-490f-81b8-63a5beea5f16-host-cni-netd\") pod \"ovnkube-node-x2w6d\" (UID: \"053b65b3-b8e5-490f-81b8-63a5beea5f16\") " pod="openshift-ovn-kubernetes/ovnkube-node-x2w6d" Jan 27 08:03:27 crc kubenswrapper[4787]: I0127 08:03:27.961319 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/053b65b3-b8e5-490f-81b8-63a5beea5f16-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-x2w6d\" (UID: \"053b65b3-b8e5-490f-81b8-63a5beea5f16\") " pod="openshift-ovn-kubernetes/ovnkube-node-x2w6d" Jan 27 08:03:27 crc kubenswrapper[4787]: I0127 08:03:27.961350 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/053b65b3-b8e5-490f-81b8-63a5beea5f16-ovn-node-metrics-cert\") pod \"ovnkube-node-x2w6d\" (UID: \"053b65b3-b8e5-490f-81b8-63a5beea5f16\") " pod="openshift-ovn-kubernetes/ovnkube-node-x2w6d" Jan 27 08:03:27 crc kubenswrapper[4787]: I0127 08:03:27.961376 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/053b65b3-b8e5-490f-81b8-63a5beea5f16-run-ovn\") pod \"ovnkube-node-x2w6d\" (UID: \"053b65b3-b8e5-490f-81b8-63a5beea5f16\") " pod="openshift-ovn-kubernetes/ovnkube-node-x2w6d" Jan 27 08:03:27 crc kubenswrapper[4787]: I0127 08:03:27.961400 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/053b65b3-b8e5-490f-81b8-63a5beea5f16-run-openvswitch\") pod \"ovnkube-node-x2w6d\" (UID: \"053b65b3-b8e5-490f-81b8-63a5beea5f16\") " pod="openshift-ovn-kubernetes/ovnkube-node-x2w6d" Jan 27 08:03:27 crc kubenswrapper[4787]: I0127 08:03:27.961433 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/053b65b3-b8e5-490f-81b8-63a5beea5f16-host-run-ovn-kubernetes\") pod \"ovnkube-node-x2w6d\" (UID: \"053b65b3-b8e5-490f-81b8-63a5beea5f16\") " pod="openshift-ovn-kubernetes/ovnkube-node-x2w6d" Jan 27 08:03:27 crc kubenswrapper[4787]: I0127 08:03:27.961454 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/053b65b3-b8e5-490f-81b8-63a5beea5f16-host-cni-bin\") pod \"ovnkube-node-x2w6d\" (UID: \"053b65b3-b8e5-490f-81b8-63a5beea5f16\") " pod="openshift-ovn-kubernetes/ovnkube-node-x2w6d" Jan 27 08:03:27 crc kubenswrapper[4787]: I0127 08:03:27.961481 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/053b65b3-b8e5-490f-81b8-63a5beea5f16-host-slash\") pod \"ovnkube-node-x2w6d\" (UID: \"053b65b3-b8e5-490f-81b8-63a5beea5f16\") " pod="openshift-ovn-kubernetes/ovnkube-node-x2w6d" Jan 27 08:03:27 crc kubenswrapper[4787]: I0127 08:03:27.961502 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/053b65b3-b8e5-490f-81b8-63a5beea5f16-log-socket\") pod \"ovnkube-node-x2w6d\" (UID: \"053b65b3-b8e5-490f-81b8-63a5beea5f16\") " pod="openshift-ovn-kubernetes/ovnkube-node-x2w6d" Jan 27 08:03:27 crc kubenswrapper[4787]: I0127 08:03:27.961531 4787 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/053b65b3-b8e5-490f-81b8-63a5beea5f16-env-overrides\") pod \"ovnkube-node-x2w6d\" (UID: \"053b65b3-b8e5-490f-81b8-63a5beea5f16\") " pod="openshift-ovn-kubernetes/ovnkube-node-x2w6d" Jan 27 08:03:27 crc kubenswrapper[4787]: I0127 08:03:27.961719 4787 reconciler_common.go:293] "Volume detached for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/fa44405c-042c-485a-ab6c-912dcd377751-run-systemd\") on node \"crc\" DevicePath \"\"" Jan 27 08:03:27 crc kubenswrapper[4787]: I0127 08:03:27.961741 4787 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tj4tn\" (UniqueName: \"kubernetes.io/projected/fa44405c-042c-485a-ab6c-912dcd377751-kube-api-access-tj4tn\") on node \"crc\" DevicePath \"\"" Jan 27 08:03:27 crc kubenswrapper[4787]: I0127 08:03:27.961757 4787 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/fa44405c-042c-485a-ab6c-912dcd377751-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Jan 27 08:03:27 crc kubenswrapper[4787]: I0127 08:03:27.961771 4787 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/fa44405c-042c-485a-ab6c-912dcd377751-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Jan 27 08:03:27 crc kubenswrapper[4787]: I0127 08:03:27.961783 4787 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/fa44405c-042c-485a-ab6c-912dcd377751-env-overrides\") on node \"crc\" DevicePath \"\"" Jan 27 08:03:27 crc kubenswrapper[4787]: I0127 08:03:27.967617 4787 scope.go:117] "RemoveContainer" containerID="108cdc389c2cbb9baeb538856e70ea1ca2bce5968eb4097700b3e71e5322258d" Jan 27 08:03:28 crc kubenswrapper[4787]: I0127 08:03:28.016393 4787 scope.go:117] "RemoveContainer" containerID="0bcc1a1d9f6e5bd0d8cf9b36441460debd839f92291531175eb05db5070136e8" Jan 27 08:03:28 crc kubenswrapper[4787]: I0127 08:03:28.049988 4787 scope.go:117] "RemoveContainer" containerID="2abb29a77d0081b70495701e68f0a5a9b80656eab59ae8de10fab1450a1df49f" Jan 27 08:03:28 crc kubenswrapper[4787]: I0127 08:03:28.063303 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/053b65b3-b8e5-490f-81b8-63a5beea5f16-run-openvswitch\") pod \"ovnkube-node-x2w6d\" (UID: \"053b65b3-b8e5-490f-81b8-63a5beea5f16\") " pod="openshift-ovn-kubernetes/ovnkube-node-x2w6d" Jan 27 08:03:28 crc kubenswrapper[4787]: I0127 08:03:28.063386 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/053b65b3-b8e5-490f-81b8-63a5beea5f16-host-run-ovn-kubernetes\") pod \"ovnkube-node-x2w6d\" (UID: \"053b65b3-b8e5-490f-81b8-63a5beea5f16\") " pod="openshift-ovn-kubernetes/ovnkube-node-x2w6d" Jan 27 08:03:28 crc kubenswrapper[4787]: I0127 08:03:28.063428 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/053b65b3-b8e5-490f-81b8-63a5beea5f16-run-openvswitch\") pod \"ovnkube-node-x2w6d\" (UID: \"053b65b3-b8e5-490f-81b8-63a5beea5f16\") " pod="openshift-ovn-kubernetes/ovnkube-node-x2w6d" Jan 27 08:03:28 crc kubenswrapper[4787]: I0127 08:03:28.063464 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" 
(UniqueName: \"kubernetes.io/host-path/053b65b3-b8e5-490f-81b8-63a5beea5f16-host-cni-bin\") pod \"ovnkube-node-x2w6d\" (UID: \"053b65b3-b8e5-490f-81b8-63a5beea5f16\") " pod="openshift-ovn-kubernetes/ovnkube-node-x2w6d" Jan 27 08:03:28 crc kubenswrapper[4787]: I0127 08:03:28.063433 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/053b65b3-b8e5-490f-81b8-63a5beea5f16-host-cni-bin\") pod \"ovnkube-node-x2w6d\" (UID: \"053b65b3-b8e5-490f-81b8-63a5beea5f16\") " pod="openshift-ovn-kubernetes/ovnkube-node-x2w6d" Jan 27 08:03:28 crc kubenswrapper[4787]: I0127 08:03:28.063490 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/053b65b3-b8e5-490f-81b8-63a5beea5f16-host-run-ovn-kubernetes\") pod \"ovnkube-node-x2w6d\" (UID: \"053b65b3-b8e5-490f-81b8-63a5beea5f16\") " pod="openshift-ovn-kubernetes/ovnkube-node-x2w6d" Jan 27 08:03:28 crc kubenswrapper[4787]: I0127 08:03:28.063504 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/053b65b3-b8e5-490f-81b8-63a5beea5f16-host-slash\") pod \"ovnkube-node-x2w6d\" (UID: \"053b65b3-b8e5-490f-81b8-63a5beea5f16\") " pod="openshift-ovn-kubernetes/ovnkube-node-x2w6d" Jan 27 08:03:28 crc kubenswrapper[4787]: I0127 08:03:28.063527 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/053b65b3-b8e5-490f-81b8-63a5beea5f16-log-socket\") pod \"ovnkube-node-x2w6d\" (UID: \"053b65b3-b8e5-490f-81b8-63a5beea5f16\") " pod="openshift-ovn-kubernetes/ovnkube-node-x2w6d" Jan 27 08:03:28 crc kubenswrapper[4787]: I0127 08:03:28.063566 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/053b65b3-b8e5-490f-81b8-63a5beea5f16-env-overrides\") pod \"ovnkube-node-x2w6d\" (UID: \"053b65b3-b8e5-490f-81b8-63a5beea5f16\") " pod="openshift-ovn-kubernetes/ovnkube-node-x2w6d" Jan 27 08:03:28 crc kubenswrapper[4787]: I0127 08:03:28.063604 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/053b65b3-b8e5-490f-81b8-63a5beea5f16-var-lib-openvswitch\") pod \"ovnkube-node-x2w6d\" (UID: \"053b65b3-b8e5-490f-81b8-63a5beea5f16\") " pod="openshift-ovn-kubernetes/ovnkube-node-x2w6d" Jan 27 08:03:28 crc kubenswrapper[4787]: I0127 08:03:28.063628 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/053b65b3-b8e5-490f-81b8-63a5beea5f16-ovnkube-script-lib\") pod \"ovnkube-node-x2w6d\" (UID: \"053b65b3-b8e5-490f-81b8-63a5beea5f16\") " pod="openshift-ovn-kubernetes/ovnkube-node-x2w6d" Jan 27 08:03:28 crc kubenswrapper[4787]: I0127 08:03:28.063652 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/053b65b3-b8e5-490f-81b8-63a5beea5f16-host-kubelet\") pod \"ovnkube-node-x2w6d\" (UID: \"053b65b3-b8e5-490f-81b8-63a5beea5f16\") " pod="openshift-ovn-kubernetes/ovnkube-node-x2w6d" Jan 27 08:03:28 crc kubenswrapper[4787]: I0127 08:03:28.063674 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/053b65b3-b8e5-490f-81b8-63a5beea5f16-etc-openvswitch\") 
pod \"ovnkube-node-x2w6d\" (UID: \"053b65b3-b8e5-490f-81b8-63a5beea5f16\") " pod="openshift-ovn-kubernetes/ovnkube-node-x2w6d" Jan 27 08:03:28 crc kubenswrapper[4787]: I0127 08:03:28.063702 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/053b65b3-b8e5-490f-81b8-63a5beea5f16-host-run-netns\") pod \"ovnkube-node-x2w6d\" (UID: \"053b65b3-b8e5-490f-81b8-63a5beea5f16\") " pod="openshift-ovn-kubernetes/ovnkube-node-x2w6d" Jan 27 08:03:28 crc kubenswrapper[4787]: I0127 08:03:28.063750 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/053b65b3-b8e5-490f-81b8-63a5beea5f16-systemd-units\") pod \"ovnkube-node-x2w6d\" (UID: \"053b65b3-b8e5-490f-81b8-63a5beea5f16\") " pod="openshift-ovn-kubernetes/ovnkube-node-x2w6d" Jan 27 08:03:28 crc kubenswrapper[4787]: I0127 08:03:28.063779 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/053b65b3-b8e5-490f-81b8-63a5beea5f16-ovnkube-config\") pod \"ovnkube-node-x2w6d\" (UID: \"053b65b3-b8e5-490f-81b8-63a5beea5f16\") " pod="openshift-ovn-kubernetes/ovnkube-node-x2w6d" Jan 27 08:03:28 crc kubenswrapper[4787]: I0127 08:03:28.063802 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4zrj4\" (UniqueName: \"kubernetes.io/projected/053b65b3-b8e5-490f-81b8-63a5beea5f16-kube-api-access-4zrj4\") pod \"ovnkube-node-x2w6d\" (UID: \"053b65b3-b8e5-490f-81b8-63a5beea5f16\") " pod="openshift-ovn-kubernetes/ovnkube-node-x2w6d" Jan 27 08:03:28 crc kubenswrapper[4787]: I0127 08:03:28.063824 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/053b65b3-b8e5-490f-81b8-63a5beea5f16-run-systemd\") pod \"ovnkube-node-x2w6d\" (UID: \"053b65b3-b8e5-490f-81b8-63a5beea5f16\") " pod="openshift-ovn-kubernetes/ovnkube-node-x2w6d" Jan 27 08:03:28 crc kubenswrapper[4787]: I0127 08:03:28.063844 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/053b65b3-b8e5-490f-81b8-63a5beea5f16-node-log\") pod \"ovnkube-node-x2w6d\" (UID: \"053b65b3-b8e5-490f-81b8-63a5beea5f16\") " pod="openshift-ovn-kubernetes/ovnkube-node-x2w6d" Jan 27 08:03:28 crc kubenswrapper[4787]: I0127 08:03:28.063866 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/053b65b3-b8e5-490f-81b8-63a5beea5f16-host-cni-netd\") pod \"ovnkube-node-x2w6d\" (UID: \"053b65b3-b8e5-490f-81b8-63a5beea5f16\") " pod="openshift-ovn-kubernetes/ovnkube-node-x2w6d" Jan 27 08:03:28 crc kubenswrapper[4787]: I0127 08:03:28.063910 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/053b65b3-b8e5-490f-81b8-63a5beea5f16-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-x2w6d\" (UID: \"053b65b3-b8e5-490f-81b8-63a5beea5f16\") " pod="openshift-ovn-kubernetes/ovnkube-node-x2w6d" Jan 27 08:03:28 crc kubenswrapper[4787]: I0127 08:03:28.063952 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/053b65b3-b8e5-490f-81b8-63a5beea5f16-ovn-node-metrics-cert\") pod \"ovnkube-node-x2w6d\" (UID: 
\"053b65b3-b8e5-490f-81b8-63a5beea5f16\") " pod="openshift-ovn-kubernetes/ovnkube-node-x2w6d" Jan 27 08:03:28 crc kubenswrapper[4787]: I0127 08:03:28.063979 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/053b65b3-b8e5-490f-81b8-63a5beea5f16-run-ovn\") pod \"ovnkube-node-x2w6d\" (UID: \"053b65b3-b8e5-490f-81b8-63a5beea5f16\") " pod="openshift-ovn-kubernetes/ovnkube-node-x2w6d" Jan 27 08:03:28 crc kubenswrapper[4787]: I0127 08:03:28.064044 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/053b65b3-b8e5-490f-81b8-63a5beea5f16-run-ovn\") pod \"ovnkube-node-x2w6d\" (UID: \"053b65b3-b8e5-490f-81b8-63a5beea5f16\") " pod="openshift-ovn-kubernetes/ovnkube-node-x2w6d" Jan 27 08:03:28 crc kubenswrapper[4787]: I0127 08:03:28.064075 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/053b65b3-b8e5-490f-81b8-63a5beea5f16-host-slash\") pod \"ovnkube-node-x2w6d\" (UID: \"053b65b3-b8e5-490f-81b8-63a5beea5f16\") " pod="openshift-ovn-kubernetes/ovnkube-node-x2w6d" Jan 27 08:03:28 crc kubenswrapper[4787]: I0127 08:03:28.064101 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/053b65b3-b8e5-490f-81b8-63a5beea5f16-log-socket\") pod \"ovnkube-node-x2w6d\" (UID: \"053b65b3-b8e5-490f-81b8-63a5beea5f16\") " pod="openshift-ovn-kubernetes/ovnkube-node-x2w6d" Jan 27 08:03:28 crc kubenswrapper[4787]: I0127 08:03:28.064813 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/053b65b3-b8e5-490f-81b8-63a5beea5f16-ovnkube-config\") pod \"ovnkube-node-x2w6d\" (UID: \"053b65b3-b8e5-490f-81b8-63a5beea5f16\") " pod="openshift-ovn-kubernetes/ovnkube-node-x2w6d" Jan 27 08:03:28 crc kubenswrapper[4787]: I0127 08:03:28.064861 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/053b65b3-b8e5-490f-81b8-63a5beea5f16-var-lib-openvswitch\") pod \"ovnkube-node-x2w6d\" (UID: \"053b65b3-b8e5-490f-81b8-63a5beea5f16\") " pod="openshift-ovn-kubernetes/ovnkube-node-x2w6d" Jan 27 08:03:28 crc kubenswrapper[4787]: I0127 08:03:28.065017 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/053b65b3-b8e5-490f-81b8-63a5beea5f16-run-systemd\") pod \"ovnkube-node-x2w6d\" (UID: \"053b65b3-b8e5-490f-81b8-63a5beea5f16\") " pod="openshift-ovn-kubernetes/ovnkube-node-x2w6d" Jan 27 08:03:28 crc kubenswrapper[4787]: I0127 08:03:28.065060 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/053b65b3-b8e5-490f-81b8-63a5beea5f16-node-log\") pod \"ovnkube-node-x2w6d\" (UID: \"053b65b3-b8e5-490f-81b8-63a5beea5f16\") " pod="openshift-ovn-kubernetes/ovnkube-node-x2w6d" Jan 27 08:03:28 crc kubenswrapper[4787]: I0127 08:03:28.065104 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/053b65b3-b8e5-490f-81b8-63a5beea5f16-host-cni-netd\") pod \"ovnkube-node-x2w6d\" (UID: \"053b65b3-b8e5-490f-81b8-63a5beea5f16\") " pod="openshift-ovn-kubernetes/ovnkube-node-x2w6d" Jan 27 08:03:28 crc kubenswrapper[4787]: I0127 08:03:28.065140 4787 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/053b65b3-b8e5-490f-81b8-63a5beea5f16-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-x2w6d\" (UID: \"053b65b3-b8e5-490f-81b8-63a5beea5f16\") " pod="openshift-ovn-kubernetes/ovnkube-node-x2w6d" Jan 27 08:03:28 crc kubenswrapper[4787]: I0127 08:03:28.065421 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/053b65b3-b8e5-490f-81b8-63a5beea5f16-ovnkube-script-lib\") pod \"ovnkube-node-x2w6d\" (UID: \"053b65b3-b8e5-490f-81b8-63a5beea5f16\") " pod="openshift-ovn-kubernetes/ovnkube-node-x2w6d" Jan 27 08:03:28 crc kubenswrapper[4787]: I0127 08:03:28.065463 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/053b65b3-b8e5-490f-81b8-63a5beea5f16-host-kubelet\") pod \"ovnkube-node-x2w6d\" (UID: \"053b65b3-b8e5-490f-81b8-63a5beea5f16\") " pod="openshift-ovn-kubernetes/ovnkube-node-x2w6d" Jan 27 08:03:28 crc kubenswrapper[4787]: I0127 08:03:28.065492 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/053b65b3-b8e5-490f-81b8-63a5beea5f16-etc-openvswitch\") pod \"ovnkube-node-x2w6d\" (UID: \"053b65b3-b8e5-490f-81b8-63a5beea5f16\") " pod="openshift-ovn-kubernetes/ovnkube-node-x2w6d" Jan 27 08:03:28 crc kubenswrapper[4787]: I0127 08:03:28.065522 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/053b65b3-b8e5-490f-81b8-63a5beea5f16-host-run-netns\") pod \"ovnkube-node-x2w6d\" (UID: \"053b65b3-b8e5-490f-81b8-63a5beea5f16\") " pod="openshift-ovn-kubernetes/ovnkube-node-x2w6d" Jan 27 08:03:28 crc kubenswrapper[4787]: I0127 08:03:28.065688 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/053b65b3-b8e5-490f-81b8-63a5beea5f16-systemd-units\") pod \"ovnkube-node-x2w6d\" (UID: \"053b65b3-b8e5-490f-81b8-63a5beea5f16\") " pod="openshift-ovn-kubernetes/ovnkube-node-x2w6d" Jan 27 08:03:28 crc kubenswrapper[4787]: I0127 08:03:28.066951 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/053b65b3-b8e5-490f-81b8-63a5beea5f16-env-overrides\") pod \"ovnkube-node-x2w6d\" (UID: \"053b65b3-b8e5-490f-81b8-63a5beea5f16\") " pod="openshift-ovn-kubernetes/ovnkube-node-x2w6d" Jan 27 08:03:28 crc kubenswrapper[4787]: I0127 08:03:28.067701 4787 scope.go:117] "RemoveContainer" containerID="b5f54aa358ddf7d9fe76fd45c4e3332ba2ef1fa4da77c6f38ae29209c4339a80" Jan 27 08:03:28 crc kubenswrapper[4787]: I0127 08:03:28.070430 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/053b65b3-b8e5-490f-81b8-63a5beea5f16-ovn-node-metrics-cert\") pod \"ovnkube-node-x2w6d\" (UID: \"053b65b3-b8e5-490f-81b8-63a5beea5f16\") " pod="openshift-ovn-kubernetes/ovnkube-node-x2w6d" Jan 27 08:03:28 crc kubenswrapper[4787]: I0127 08:03:28.081457 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4zrj4\" (UniqueName: \"kubernetes.io/projected/053b65b3-b8e5-490f-81b8-63a5beea5f16-kube-api-access-4zrj4\") pod \"ovnkube-node-x2w6d\" (UID: \"053b65b3-b8e5-490f-81b8-63a5beea5f16\") " pod="openshift-ovn-kubernetes/ovnkube-node-x2w6d" Jan 27 08:03:28 crc 
kubenswrapper[4787]: I0127 08:03:28.106146 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-x2w6d" Jan 27 08:03:28 crc kubenswrapper[4787]: I0127 08:03:28.123812 4787 scope.go:117] "RemoveContainer" containerID="fbc588f712cb0167029fc80f924f52841fb904738bbcf774c53ca15a2642ef9c" Jan 27 08:03:28 crc kubenswrapper[4787]: W0127 08:03:28.133301 4787 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod053b65b3_b8e5_490f_81b8_63a5beea5f16.slice/crio-cbc85ce561f2b9cb45c3f65310a13d497be58ceb62f9eb96fd12a7bbaebac168 WatchSource:0}: Error finding container cbc85ce561f2b9cb45c3f65310a13d497be58ceb62f9eb96fd12a7bbaebac168: Status 404 returned error can't find the container with id cbc85ce561f2b9cb45c3f65310a13d497be58ceb62f9eb96fd12a7bbaebac168 Jan 27 08:03:28 crc kubenswrapper[4787]: I0127 08:03:28.155154 4787 scope.go:117] "RemoveContainer" containerID="fc1a27dd4120a4eafd6bfbd908f9fdbc80e9b006afafb509399f2b657b129b2c" Jan 27 08:03:28 crc kubenswrapper[4787]: I0127 08:03:28.177062 4787 scope.go:117] "RemoveContainer" containerID="199ad62dee1b7ba9f8af9b2f3fcd77db94e8ba7dbfe2d011610c47358c17126b" Jan 27 08:03:28 crc kubenswrapper[4787]: I0127 08:03:28.201603 4787 scope.go:117] "RemoveContainer" containerID="be3770d2a2e5b599b3bac6445f4cdedb5531335a40c7e1a5727aa8b323e2394c" Jan 27 08:03:28 crc kubenswrapper[4787]: E0127 08:03:28.202576 4787 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"be3770d2a2e5b599b3bac6445f4cdedb5531335a40c7e1a5727aa8b323e2394c\": container with ID starting with be3770d2a2e5b599b3bac6445f4cdedb5531335a40c7e1a5727aa8b323e2394c not found: ID does not exist" containerID="be3770d2a2e5b599b3bac6445f4cdedb5531335a40c7e1a5727aa8b323e2394c" Jan 27 08:03:28 crc kubenswrapper[4787]: I0127 08:03:28.202637 4787 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"be3770d2a2e5b599b3bac6445f4cdedb5531335a40c7e1a5727aa8b323e2394c"} err="failed to get container status \"be3770d2a2e5b599b3bac6445f4cdedb5531335a40c7e1a5727aa8b323e2394c\": rpc error: code = NotFound desc = could not find container \"be3770d2a2e5b599b3bac6445f4cdedb5531335a40c7e1a5727aa8b323e2394c\": container with ID starting with be3770d2a2e5b599b3bac6445f4cdedb5531335a40c7e1a5727aa8b323e2394c not found: ID does not exist" Jan 27 08:03:28 crc kubenswrapper[4787]: I0127 08:03:28.202681 4787 scope.go:117] "RemoveContainer" containerID="6d3ec81938807879c988aabe13c7bf18add843b4295e6a4228405f02b47fd637" Jan 27 08:03:28 crc kubenswrapper[4787]: E0127 08:03:28.203325 4787 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6d3ec81938807879c988aabe13c7bf18add843b4295e6a4228405f02b47fd637\": container with ID starting with 6d3ec81938807879c988aabe13c7bf18add843b4295e6a4228405f02b47fd637 not found: ID does not exist" containerID="6d3ec81938807879c988aabe13c7bf18add843b4295e6a4228405f02b47fd637" Jan 27 08:03:28 crc kubenswrapper[4787]: I0127 08:03:28.203373 4787 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6d3ec81938807879c988aabe13c7bf18add843b4295e6a4228405f02b47fd637"} err="failed to get container status \"6d3ec81938807879c988aabe13c7bf18add843b4295e6a4228405f02b47fd637\": rpc error: code = NotFound desc = could not find container 
\"6d3ec81938807879c988aabe13c7bf18add843b4295e6a4228405f02b47fd637\": container with ID starting with 6d3ec81938807879c988aabe13c7bf18add843b4295e6a4228405f02b47fd637 not found: ID does not exist" Jan 27 08:03:28 crc kubenswrapper[4787]: I0127 08:03:28.203405 4787 scope.go:117] "RemoveContainer" containerID="d46b5c512f7b14d97a53ce0355a7ceb08370d3e91ccef003b40386a9785df413" Jan 27 08:03:28 crc kubenswrapper[4787]: E0127 08:03:28.203828 4787 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d46b5c512f7b14d97a53ce0355a7ceb08370d3e91ccef003b40386a9785df413\": container with ID starting with d46b5c512f7b14d97a53ce0355a7ceb08370d3e91ccef003b40386a9785df413 not found: ID does not exist" containerID="d46b5c512f7b14d97a53ce0355a7ceb08370d3e91ccef003b40386a9785df413" Jan 27 08:03:28 crc kubenswrapper[4787]: I0127 08:03:28.203885 4787 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d46b5c512f7b14d97a53ce0355a7ceb08370d3e91ccef003b40386a9785df413"} err="failed to get container status \"d46b5c512f7b14d97a53ce0355a7ceb08370d3e91ccef003b40386a9785df413\": rpc error: code = NotFound desc = could not find container \"d46b5c512f7b14d97a53ce0355a7ceb08370d3e91ccef003b40386a9785df413\": container with ID starting with d46b5c512f7b14d97a53ce0355a7ceb08370d3e91ccef003b40386a9785df413 not found: ID does not exist" Jan 27 08:03:28 crc kubenswrapper[4787]: I0127 08:03:28.203920 4787 scope.go:117] "RemoveContainer" containerID="108cdc389c2cbb9baeb538856e70ea1ca2bce5968eb4097700b3e71e5322258d" Jan 27 08:03:28 crc kubenswrapper[4787]: E0127 08:03:28.204342 4787 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"108cdc389c2cbb9baeb538856e70ea1ca2bce5968eb4097700b3e71e5322258d\": container with ID starting with 108cdc389c2cbb9baeb538856e70ea1ca2bce5968eb4097700b3e71e5322258d not found: ID does not exist" containerID="108cdc389c2cbb9baeb538856e70ea1ca2bce5968eb4097700b3e71e5322258d" Jan 27 08:03:28 crc kubenswrapper[4787]: I0127 08:03:28.204434 4787 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"108cdc389c2cbb9baeb538856e70ea1ca2bce5968eb4097700b3e71e5322258d"} err="failed to get container status \"108cdc389c2cbb9baeb538856e70ea1ca2bce5968eb4097700b3e71e5322258d\": rpc error: code = NotFound desc = could not find container \"108cdc389c2cbb9baeb538856e70ea1ca2bce5968eb4097700b3e71e5322258d\": container with ID starting with 108cdc389c2cbb9baeb538856e70ea1ca2bce5968eb4097700b3e71e5322258d not found: ID does not exist" Jan 27 08:03:28 crc kubenswrapper[4787]: I0127 08:03:28.204523 4787 scope.go:117] "RemoveContainer" containerID="0bcc1a1d9f6e5bd0d8cf9b36441460debd839f92291531175eb05db5070136e8" Jan 27 08:03:28 crc kubenswrapper[4787]: E0127 08:03:28.204935 4787 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0bcc1a1d9f6e5bd0d8cf9b36441460debd839f92291531175eb05db5070136e8\": container with ID starting with 0bcc1a1d9f6e5bd0d8cf9b36441460debd839f92291531175eb05db5070136e8 not found: ID does not exist" containerID="0bcc1a1d9f6e5bd0d8cf9b36441460debd839f92291531175eb05db5070136e8" Jan 27 08:03:28 crc kubenswrapper[4787]: I0127 08:03:28.204967 4787 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0bcc1a1d9f6e5bd0d8cf9b36441460debd839f92291531175eb05db5070136e8"} 
err="failed to get container status \"0bcc1a1d9f6e5bd0d8cf9b36441460debd839f92291531175eb05db5070136e8\": rpc error: code = NotFound desc = could not find container \"0bcc1a1d9f6e5bd0d8cf9b36441460debd839f92291531175eb05db5070136e8\": container with ID starting with 0bcc1a1d9f6e5bd0d8cf9b36441460debd839f92291531175eb05db5070136e8 not found: ID does not exist" Jan 27 08:03:28 crc kubenswrapper[4787]: I0127 08:03:28.204994 4787 scope.go:117] "RemoveContainer" containerID="2abb29a77d0081b70495701e68f0a5a9b80656eab59ae8de10fab1450a1df49f" Jan 27 08:03:28 crc kubenswrapper[4787]: E0127 08:03:28.205332 4787 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2abb29a77d0081b70495701e68f0a5a9b80656eab59ae8de10fab1450a1df49f\": container with ID starting with 2abb29a77d0081b70495701e68f0a5a9b80656eab59ae8de10fab1450a1df49f not found: ID does not exist" containerID="2abb29a77d0081b70495701e68f0a5a9b80656eab59ae8de10fab1450a1df49f" Jan 27 08:03:28 crc kubenswrapper[4787]: I0127 08:03:28.205372 4787 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2abb29a77d0081b70495701e68f0a5a9b80656eab59ae8de10fab1450a1df49f"} err="failed to get container status \"2abb29a77d0081b70495701e68f0a5a9b80656eab59ae8de10fab1450a1df49f\": rpc error: code = NotFound desc = could not find container \"2abb29a77d0081b70495701e68f0a5a9b80656eab59ae8de10fab1450a1df49f\": container with ID starting with 2abb29a77d0081b70495701e68f0a5a9b80656eab59ae8de10fab1450a1df49f not found: ID does not exist" Jan 27 08:03:28 crc kubenswrapper[4787]: I0127 08:03:28.205402 4787 scope.go:117] "RemoveContainer" containerID="b5f54aa358ddf7d9fe76fd45c4e3332ba2ef1fa4da77c6f38ae29209c4339a80" Jan 27 08:03:28 crc kubenswrapper[4787]: E0127 08:03:28.205737 4787 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b5f54aa358ddf7d9fe76fd45c4e3332ba2ef1fa4da77c6f38ae29209c4339a80\": container with ID starting with b5f54aa358ddf7d9fe76fd45c4e3332ba2ef1fa4da77c6f38ae29209c4339a80 not found: ID does not exist" containerID="b5f54aa358ddf7d9fe76fd45c4e3332ba2ef1fa4da77c6f38ae29209c4339a80" Jan 27 08:03:28 crc kubenswrapper[4787]: I0127 08:03:28.205830 4787 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b5f54aa358ddf7d9fe76fd45c4e3332ba2ef1fa4da77c6f38ae29209c4339a80"} err="failed to get container status \"b5f54aa358ddf7d9fe76fd45c4e3332ba2ef1fa4da77c6f38ae29209c4339a80\": rpc error: code = NotFound desc = could not find container \"b5f54aa358ddf7d9fe76fd45c4e3332ba2ef1fa4da77c6f38ae29209c4339a80\": container with ID starting with b5f54aa358ddf7d9fe76fd45c4e3332ba2ef1fa4da77c6f38ae29209c4339a80 not found: ID does not exist" Jan 27 08:03:28 crc kubenswrapper[4787]: I0127 08:03:28.205909 4787 scope.go:117] "RemoveContainer" containerID="fbc588f712cb0167029fc80f924f52841fb904738bbcf774c53ca15a2642ef9c" Jan 27 08:03:28 crc kubenswrapper[4787]: E0127 08:03:28.206306 4787 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fbc588f712cb0167029fc80f924f52841fb904738bbcf774c53ca15a2642ef9c\": container with ID starting with fbc588f712cb0167029fc80f924f52841fb904738bbcf774c53ca15a2642ef9c not found: ID does not exist" containerID="fbc588f712cb0167029fc80f924f52841fb904738bbcf774c53ca15a2642ef9c" Jan 27 08:03:28 crc kubenswrapper[4787]: I0127 08:03:28.206378 4787 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fbc588f712cb0167029fc80f924f52841fb904738bbcf774c53ca15a2642ef9c"} err="failed to get container status \"fbc588f712cb0167029fc80f924f52841fb904738bbcf774c53ca15a2642ef9c\": rpc error: code = NotFound desc = could not find container \"fbc588f712cb0167029fc80f924f52841fb904738bbcf774c53ca15a2642ef9c\": container with ID starting with fbc588f712cb0167029fc80f924f52841fb904738bbcf774c53ca15a2642ef9c not found: ID does not exist" Jan 27 08:03:28 crc kubenswrapper[4787]: I0127 08:03:28.206448 4787 scope.go:117] "RemoveContainer" containerID="fc1a27dd4120a4eafd6bfbd908f9fdbc80e9b006afafb509399f2b657b129b2c" Jan 27 08:03:28 crc kubenswrapper[4787]: E0127 08:03:28.207219 4787 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fc1a27dd4120a4eafd6bfbd908f9fdbc80e9b006afafb509399f2b657b129b2c\": container with ID starting with fc1a27dd4120a4eafd6bfbd908f9fdbc80e9b006afafb509399f2b657b129b2c not found: ID does not exist" containerID="fc1a27dd4120a4eafd6bfbd908f9fdbc80e9b006afafb509399f2b657b129b2c" Jan 27 08:03:28 crc kubenswrapper[4787]: I0127 08:03:28.207261 4787 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fc1a27dd4120a4eafd6bfbd908f9fdbc80e9b006afafb509399f2b657b129b2c"} err="failed to get container status \"fc1a27dd4120a4eafd6bfbd908f9fdbc80e9b006afafb509399f2b657b129b2c\": rpc error: code = NotFound desc = could not find container \"fc1a27dd4120a4eafd6bfbd908f9fdbc80e9b006afafb509399f2b657b129b2c\": container with ID starting with fc1a27dd4120a4eafd6bfbd908f9fdbc80e9b006afafb509399f2b657b129b2c not found: ID does not exist" Jan 27 08:03:28 crc kubenswrapper[4787]: I0127 08:03:28.207290 4787 scope.go:117] "RemoveContainer" containerID="199ad62dee1b7ba9f8af9b2f3fcd77db94e8ba7dbfe2d011610c47358c17126b" Jan 27 08:03:28 crc kubenswrapper[4787]: E0127 08:03:28.207820 4787 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"199ad62dee1b7ba9f8af9b2f3fcd77db94e8ba7dbfe2d011610c47358c17126b\": container with ID starting with 199ad62dee1b7ba9f8af9b2f3fcd77db94e8ba7dbfe2d011610c47358c17126b not found: ID does not exist" containerID="199ad62dee1b7ba9f8af9b2f3fcd77db94e8ba7dbfe2d011610c47358c17126b" Jan 27 08:03:28 crc kubenswrapper[4787]: I0127 08:03:28.207921 4787 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"199ad62dee1b7ba9f8af9b2f3fcd77db94e8ba7dbfe2d011610c47358c17126b"} err="failed to get container status \"199ad62dee1b7ba9f8af9b2f3fcd77db94e8ba7dbfe2d011610c47358c17126b\": rpc error: code = NotFound desc = could not find container \"199ad62dee1b7ba9f8af9b2f3fcd77db94e8ba7dbfe2d011610c47358c17126b\": container with ID starting with 199ad62dee1b7ba9f8af9b2f3fcd77db94e8ba7dbfe2d011610c47358c17126b not found: ID does not exist" Jan 27 08:03:28 crc kubenswrapper[4787]: I0127 08:03:28.207942 4787 scope.go:117] "RemoveContainer" containerID="be3770d2a2e5b599b3bac6445f4cdedb5531335a40c7e1a5727aa8b323e2394c" Jan 27 08:03:28 crc kubenswrapper[4787]: I0127 08:03:28.208362 4787 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"be3770d2a2e5b599b3bac6445f4cdedb5531335a40c7e1a5727aa8b323e2394c"} err="failed to get container status \"be3770d2a2e5b599b3bac6445f4cdedb5531335a40c7e1a5727aa8b323e2394c\": rpc error: code = NotFound desc = could 
not find container \"be3770d2a2e5b599b3bac6445f4cdedb5531335a40c7e1a5727aa8b323e2394c\": container with ID starting with be3770d2a2e5b599b3bac6445f4cdedb5531335a40c7e1a5727aa8b323e2394c not found: ID does not exist" Jan 27 08:03:28 crc kubenswrapper[4787]: I0127 08:03:28.208387 4787 scope.go:117] "RemoveContainer" containerID="6d3ec81938807879c988aabe13c7bf18add843b4295e6a4228405f02b47fd637" Jan 27 08:03:28 crc kubenswrapper[4787]: I0127 08:03:28.208661 4787 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6d3ec81938807879c988aabe13c7bf18add843b4295e6a4228405f02b47fd637"} err="failed to get container status \"6d3ec81938807879c988aabe13c7bf18add843b4295e6a4228405f02b47fd637\": rpc error: code = NotFound desc = could not find container \"6d3ec81938807879c988aabe13c7bf18add843b4295e6a4228405f02b47fd637\": container with ID starting with 6d3ec81938807879c988aabe13c7bf18add843b4295e6a4228405f02b47fd637 not found: ID does not exist" Jan 27 08:03:28 crc kubenswrapper[4787]: I0127 08:03:28.208696 4787 scope.go:117] "RemoveContainer" containerID="d46b5c512f7b14d97a53ce0355a7ceb08370d3e91ccef003b40386a9785df413" Jan 27 08:03:28 crc kubenswrapper[4787]: I0127 08:03:28.209221 4787 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d46b5c512f7b14d97a53ce0355a7ceb08370d3e91ccef003b40386a9785df413"} err="failed to get container status \"d46b5c512f7b14d97a53ce0355a7ceb08370d3e91ccef003b40386a9785df413\": rpc error: code = NotFound desc = could not find container \"d46b5c512f7b14d97a53ce0355a7ceb08370d3e91ccef003b40386a9785df413\": container with ID starting with d46b5c512f7b14d97a53ce0355a7ceb08370d3e91ccef003b40386a9785df413 not found: ID does not exist" Jan 27 08:03:28 crc kubenswrapper[4787]: I0127 08:03:28.209355 4787 scope.go:117] "RemoveContainer" containerID="108cdc389c2cbb9baeb538856e70ea1ca2bce5968eb4097700b3e71e5322258d" Jan 27 08:03:28 crc kubenswrapper[4787]: I0127 08:03:28.209883 4787 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"108cdc389c2cbb9baeb538856e70ea1ca2bce5968eb4097700b3e71e5322258d"} err="failed to get container status \"108cdc389c2cbb9baeb538856e70ea1ca2bce5968eb4097700b3e71e5322258d\": rpc error: code = NotFound desc = could not find container \"108cdc389c2cbb9baeb538856e70ea1ca2bce5968eb4097700b3e71e5322258d\": container with ID starting with 108cdc389c2cbb9baeb538856e70ea1ca2bce5968eb4097700b3e71e5322258d not found: ID does not exist" Jan 27 08:03:28 crc kubenswrapper[4787]: I0127 08:03:28.209908 4787 scope.go:117] "RemoveContainer" containerID="0bcc1a1d9f6e5bd0d8cf9b36441460debd839f92291531175eb05db5070136e8" Jan 27 08:03:28 crc kubenswrapper[4787]: I0127 08:03:28.210196 4787 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0bcc1a1d9f6e5bd0d8cf9b36441460debd839f92291531175eb05db5070136e8"} err="failed to get container status \"0bcc1a1d9f6e5bd0d8cf9b36441460debd839f92291531175eb05db5070136e8\": rpc error: code = NotFound desc = could not find container \"0bcc1a1d9f6e5bd0d8cf9b36441460debd839f92291531175eb05db5070136e8\": container with ID starting with 0bcc1a1d9f6e5bd0d8cf9b36441460debd839f92291531175eb05db5070136e8 not found: ID does not exist" Jan 27 08:03:28 crc kubenswrapper[4787]: I0127 08:03:28.210277 4787 scope.go:117] "RemoveContainer" containerID="2abb29a77d0081b70495701e68f0a5a9b80656eab59ae8de10fab1450a1df49f" Jan 27 08:03:28 crc kubenswrapper[4787]: I0127 08:03:28.210609 4787 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2abb29a77d0081b70495701e68f0a5a9b80656eab59ae8de10fab1450a1df49f"} err="failed to get container status \"2abb29a77d0081b70495701e68f0a5a9b80656eab59ae8de10fab1450a1df49f\": rpc error: code = NotFound desc = could not find container \"2abb29a77d0081b70495701e68f0a5a9b80656eab59ae8de10fab1450a1df49f\": container with ID starting with 2abb29a77d0081b70495701e68f0a5a9b80656eab59ae8de10fab1450a1df49f not found: ID does not exist" Jan 27 08:03:28 crc kubenswrapper[4787]: I0127 08:03:28.210630 4787 scope.go:117] "RemoveContainer" containerID="b5f54aa358ddf7d9fe76fd45c4e3332ba2ef1fa4da77c6f38ae29209c4339a80" Jan 27 08:03:28 crc kubenswrapper[4787]: I0127 08:03:28.210932 4787 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b5f54aa358ddf7d9fe76fd45c4e3332ba2ef1fa4da77c6f38ae29209c4339a80"} err="failed to get container status \"b5f54aa358ddf7d9fe76fd45c4e3332ba2ef1fa4da77c6f38ae29209c4339a80\": rpc error: code = NotFound desc = could not find container \"b5f54aa358ddf7d9fe76fd45c4e3332ba2ef1fa4da77c6f38ae29209c4339a80\": container with ID starting with b5f54aa358ddf7d9fe76fd45c4e3332ba2ef1fa4da77c6f38ae29209c4339a80 not found: ID does not exist" Jan 27 08:03:28 crc kubenswrapper[4787]: I0127 08:03:28.210952 4787 scope.go:117] "RemoveContainer" containerID="fbc588f712cb0167029fc80f924f52841fb904738bbcf774c53ca15a2642ef9c" Jan 27 08:03:28 crc kubenswrapper[4787]: I0127 08:03:28.211189 4787 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fbc588f712cb0167029fc80f924f52841fb904738bbcf774c53ca15a2642ef9c"} err="failed to get container status \"fbc588f712cb0167029fc80f924f52841fb904738bbcf774c53ca15a2642ef9c\": rpc error: code = NotFound desc = could not find container \"fbc588f712cb0167029fc80f924f52841fb904738bbcf774c53ca15a2642ef9c\": container with ID starting with fbc588f712cb0167029fc80f924f52841fb904738bbcf774c53ca15a2642ef9c not found: ID does not exist" Jan 27 08:03:28 crc kubenswrapper[4787]: I0127 08:03:28.211266 4787 scope.go:117] "RemoveContainer" containerID="fc1a27dd4120a4eafd6bfbd908f9fdbc80e9b006afafb509399f2b657b129b2c" Jan 27 08:03:28 crc kubenswrapper[4787]: I0127 08:03:28.211639 4787 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fc1a27dd4120a4eafd6bfbd908f9fdbc80e9b006afafb509399f2b657b129b2c"} err="failed to get container status \"fc1a27dd4120a4eafd6bfbd908f9fdbc80e9b006afafb509399f2b657b129b2c\": rpc error: code = NotFound desc = could not find container \"fc1a27dd4120a4eafd6bfbd908f9fdbc80e9b006afafb509399f2b657b129b2c\": container with ID starting with fc1a27dd4120a4eafd6bfbd908f9fdbc80e9b006afafb509399f2b657b129b2c not found: ID does not exist" Jan 27 08:03:28 crc kubenswrapper[4787]: I0127 08:03:28.211720 4787 scope.go:117] "RemoveContainer" containerID="199ad62dee1b7ba9f8af9b2f3fcd77db94e8ba7dbfe2d011610c47358c17126b" Jan 27 08:03:28 crc kubenswrapper[4787]: I0127 08:03:28.212144 4787 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"199ad62dee1b7ba9f8af9b2f3fcd77db94e8ba7dbfe2d011610c47358c17126b"} err="failed to get container status \"199ad62dee1b7ba9f8af9b2f3fcd77db94e8ba7dbfe2d011610c47358c17126b\": rpc error: code = NotFound desc = could not find container \"199ad62dee1b7ba9f8af9b2f3fcd77db94e8ba7dbfe2d011610c47358c17126b\": container with ID starting with 
199ad62dee1b7ba9f8af9b2f3fcd77db94e8ba7dbfe2d011610c47358c17126b not found: ID does not exist" Jan 27 08:03:28 crc kubenswrapper[4787]: I0127 08:03:28.212168 4787 scope.go:117] "RemoveContainer" containerID="be3770d2a2e5b599b3bac6445f4cdedb5531335a40c7e1a5727aa8b323e2394c" Jan 27 08:03:28 crc kubenswrapper[4787]: I0127 08:03:28.212496 4787 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"be3770d2a2e5b599b3bac6445f4cdedb5531335a40c7e1a5727aa8b323e2394c"} err="failed to get container status \"be3770d2a2e5b599b3bac6445f4cdedb5531335a40c7e1a5727aa8b323e2394c\": rpc error: code = NotFound desc = could not find container \"be3770d2a2e5b599b3bac6445f4cdedb5531335a40c7e1a5727aa8b323e2394c\": container with ID starting with be3770d2a2e5b599b3bac6445f4cdedb5531335a40c7e1a5727aa8b323e2394c not found: ID does not exist" Jan 27 08:03:28 crc kubenswrapper[4787]: I0127 08:03:28.212609 4787 scope.go:117] "RemoveContainer" containerID="6d3ec81938807879c988aabe13c7bf18add843b4295e6a4228405f02b47fd637" Jan 27 08:03:28 crc kubenswrapper[4787]: I0127 08:03:28.213292 4787 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6d3ec81938807879c988aabe13c7bf18add843b4295e6a4228405f02b47fd637"} err="failed to get container status \"6d3ec81938807879c988aabe13c7bf18add843b4295e6a4228405f02b47fd637\": rpc error: code = NotFound desc = could not find container \"6d3ec81938807879c988aabe13c7bf18add843b4295e6a4228405f02b47fd637\": container with ID starting with 6d3ec81938807879c988aabe13c7bf18add843b4295e6a4228405f02b47fd637 not found: ID does not exist" Jan 27 08:03:28 crc kubenswrapper[4787]: I0127 08:03:28.213391 4787 scope.go:117] "RemoveContainer" containerID="d46b5c512f7b14d97a53ce0355a7ceb08370d3e91ccef003b40386a9785df413" Jan 27 08:03:28 crc kubenswrapper[4787]: I0127 08:03:28.213678 4787 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d46b5c512f7b14d97a53ce0355a7ceb08370d3e91ccef003b40386a9785df413"} err="failed to get container status \"d46b5c512f7b14d97a53ce0355a7ceb08370d3e91ccef003b40386a9785df413\": rpc error: code = NotFound desc = could not find container \"d46b5c512f7b14d97a53ce0355a7ceb08370d3e91ccef003b40386a9785df413\": container with ID starting with d46b5c512f7b14d97a53ce0355a7ceb08370d3e91ccef003b40386a9785df413 not found: ID does not exist" Jan 27 08:03:28 crc kubenswrapper[4787]: I0127 08:03:28.213747 4787 scope.go:117] "RemoveContainer" containerID="108cdc389c2cbb9baeb538856e70ea1ca2bce5968eb4097700b3e71e5322258d" Jan 27 08:03:28 crc kubenswrapper[4787]: I0127 08:03:28.214233 4787 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"108cdc389c2cbb9baeb538856e70ea1ca2bce5968eb4097700b3e71e5322258d"} err="failed to get container status \"108cdc389c2cbb9baeb538856e70ea1ca2bce5968eb4097700b3e71e5322258d\": rpc error: code = NotFound desc = could not find container \"108cdc389c2cbb9baeb538856e70ea1ca2bce5968eb4097700b3e71e5322258d\": container with ID starting with 108cdc389c2cbb9baeb538856e70ea1ca2bce5968eb4097700b3e71e5322258d not found: ID does not exist" Jan 27 08:03:28 crc kubenswrapper[4787]: I0127 08:03:28.214327 4787 scope.go:117] "RemoveContainer" containerID="0bcc1a1d9f6e5bd0d8cf9b36441460debd839f92291531175eb05db5070136e8" Jan 27 08:03:28 crc kubenswrapper[4787]: I0127 08:03:28.214785 4787 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"0bcc1a1d9f6e5bd0d8cf9b36441460debd839f92291531175eb05db5070136e8"} err="failed to get container status \"0bcc1a1d9f6e5bd0d8cf9b36441460debd839f92291531175eb05db5070136e8\": rpc error: code = NotFound desc = could not find container \"0bcc1a1d9f6e5bd0d8cf9b36441460debd839f92291531175eb05db5070136e8\": container with ID starting with 0bcc1a1d9f6e5bd0d8cf9b36441460debd839f92291531175eb05db5070136e8 not found: ID does not exist" Jan 27 08:03:28 crc kubenswrapper[4787]: I0127 08:03:28.214821 4787 scope.go:117] "RemoveContainer" containerID="2abb29a77d0081b70495701e68f0a5a9b80656eab59ae8de10fab1450a1df49f" Jan 27 08:03:28 crc kubenswrapper[4787]: I0127 08:03:28.215199 4787 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2abb29a77d0081b70495701e68f0a5a9b80656eab59ae8de10fab1450a1df49f"} err="failed to get container status \"2abb29a77d0081b70495701e68f0a5a9b80656eab59ae8de10fab1450a1df49f\": rpc error: code = NotFound desc = could not find container \"2abb29a77d0081b70495701e68f0a5a9b80656eab59ae8de10fab1450a1df49f\": container with ID starting with 2abb29a77d0081b70495701e68f0a5a9b80656eab59ae8de10fab1450a1df49f not found: ID does not exist" Jan 27 08:03:28 crc kubenswrapper[4787]: I0127 08:03:28.215235 4787 scope.go:117] "RemoveContainer" containerID="b5f54aa358ddf7d9fe76fd45c4e3332ba2ef1fa4da77c6f38ae29209c4339a80" Jan 27 08:03:28 crc kubenswrapper[4787]: I0127 08:03:28.215598 4787 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b5f54aa358ddf7d9fe76fd45c4e3332ba2ef1fa4da77c6f38ae29209c4339a80"} err="failed to get container status \"b5f54aa358ddf7d9fe76fd45c4e3332ba2ef1fa4da77c6f38ae29209c4339a80\": rpc error: code = NotFound desc = could not find container \"b5f54aa358ddf7d9fe76fd45c4e3332ba2ef1fa4da77c6f38ae29209c4339a80\": container with ID starting with b5f54aa358ddf7d9fe76fd45c4e3332ba2ef1fa4da77c6f38ae29209c4339a80 not found: ID does not exist" Jan 27 08:03:28 crc kubenswrapper[4787]: I0127 08:03:28.215671 4787 scope.go:117] "RemoveContainer" containerID="fbc588f712cb0167029fc80f924f52841fb904738bbcf774c53ca15a2642ef9c" Jan 27 08:03:28 crc kubenswrapper[4787]: I0127 08:03:28.215981 4787 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fbc588f712cb0167029fc80f924f52841fb904738bbcf774c53ca15a2642ef9c"} err="failed to get container status \"fbc588f712cb0167029fc80f924f52841fb904738bbcf774c53ca15a2642ef9c\": rpc error: code = NotFound desc = could not find container \"fbc588f712cb0167029fc80f924f52841fb904738bbcf774c53ca15a2642ef9c\": container with ID starting with fbc588f712cb0167029fc80f924f52841fb904738bbcf774c53ca15a2642ef9c not found: ID does not exist" Jan 27 08:03:28 crc kubenswrapper[4787]: I0127 08:03:28.216050 4787 scope.go:117] "RemoveContainer" containerID="fc1a27dd4120a4eafd6bfbd908f9fdbc80e9b006afafb509399f2b657b129b2c" Jan 27 08:03:28 crc kubenswrapper[4787]: I0127 08:03:28.216353 4787 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fc1a27dd4120a4eafd6bfbd908f9fdbc80e9b006afafb509399f2b657b129b2c"} err="failed to get container status \"fc1a27dd4120a4eafd6bfbd908f9fdbc80e9b006afafb509399f2b657b129b2c\": rpc error: code = NotFound desc = could not find container \"fc1a27dd4120a4eafd6bfbd908f9fdbc80e9b006afafb509399f2b657b129b2c\": container with ID starting with fc1a27dd4120a4eafd6bfbd908f9fdbc80e9b006afafb509399f2b657b129b2c not found: ID does not exist" Jan 
27 08:03:28 crc kubenswrapper[4787]: I0127 08:03:28.216377 4787 scope.go:117] "RemoveContainer" containerID="199ad62dee1b7ba9f8af9b2f3fcd77db94e8ba7dbfe2d011610c47358c17126b" Jan 27 08:03:28 crc kubenswrapper[4787]: I0127 08:03:28.216749 4787 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"199ad62dee1b7ba9f8af9b2f3fcd77db94e8ba7dbfe2d011610c47358c17126b"} err="failed to get container status \"199ad62dee1b7ba9f8af9b2f3fcd77db94e8ba7dbfe2d011610c47358c17126b\": rpc error: code = NotFound desc = could not find container \"199ad62dee1b7ba9f8af9b2f3fcd77db94e8ba7dbfe2d011610c47358c17126b\": container with ID starting with 199ad62dee1b7ba9f8af9b2f3fcd77db94e8ba7dbfe2d011610c47358c17126b not found: ID does not exist" Jan 27 08:03:28 crc kubenswrapper[4787]: I0127 08:03:28.216855 4787 scope.go:117] "RemoveContainer" containerID="be3770d2a2e5b599b3bac6445f4cdedb5531335a40c7e1a5727aa8b323e2394c" Jan 27 08:03:28 crc kubenswrapper[4787]: I0127 08:03:28.217259 4787 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"be3770d2a2e5b599b3bac6445f4cdedb5531335a40c7e1a5727aa8b323e2394c"} err="failed to get container status \"be3770d2a2e5b599b3bac6445f4cdedb5531335a40c7e1a5727aa8b323e2394c\": rpc error: code = NotFound desc = could not find container \"be3770d2a2e5b599b3bac6445f4cdedb5531335a40c7e1a5727aa8b323e2394c\": container with ID starting with be3770d2a2e5b599b3bac6445f4cdedb5531335a40c7e1a5727aa8b323e2394c not found: ID does not exist" Jan 27 08:03:28 crc kubenswrapper[4787]: I0127 08:03:28.217294 4787 scope.go:117] "RemoveContainer" containerID="6d3ec81938807879c988aabe13c7bf18add843b4295e6a4228405f02b47fd637" Jan 27 08:03:28 crc kubenswrapper[4787]: I0127 08:03:28.217783 4787 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6d3ec81938807879c988aabe13c7bf18add843b4295e6a4228405f02b47fd637"} err="failed to get container status \"6d3ec81938807879c988aabe13c7bf18add843b4295e6a4228405f02b47fd637\": rpc error: code = NotFound desc = could not find container \"6d3ec81938807879c988aabe13c7bf18add843b4295e6a4228405f02b47fd637\": container with ID starting with 6d3ec81938807879c988aabe13c7bf18add843b4295e6a4228405f02b47fd637 not found: ID does not exist" Jan 27 08:03:28 crc kubenswrapper[4787]: I0127 08:03:28.217807 4787 scope.go:117] "RemoveContainer" containerID="d46b5c512f7b14d97a53ce0355a7ceb08370d3e91ccef003b40386a9785df413" Jan 27 08:03:28 crc kubenswrapper[4787]: I0127 08:03:28.219657 4787 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d46b5c512f7b14d97a53ce0355a7ceb08370d3e91ccef003b40386a9785df413"} err="failed to get container status \"d46b5c512f7b14d97a53ce0355a7ceb08370d3e91ccef003b40386a9785df413\": rpc error: code = NotFound desc = could not find container \"d46b5c512f7b14d97a53ce0355a7ceb08370d3e91ccef003b40386a9785df413\": container with ID starting with d46b5c512f7b14d97a53ce0355a7ceb08370d3e91ccef003b40386a9785df413 not found: ID does not exist" Jan 27 08:03:28 crc kubenswrapper[4787]: I0127 08:03:28.219680 4787 scope.go:117] "RemoveContainer" containerID="108cdc389c2cbb9baeb538856e70ea1ca2bce5968eb4097700b3e71e5322258d" Jan 27 08:03:28 crc kubenswrapper[4787]: I0127 08:03:28.219967 4787 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"108cdc389c2cbb9baeb538856e70ea1ca2bce5968eb4097700b3e71e5322258d"} err="failed to get container status 
\"108cdc389c2cbb9baeb538856e70ea1ca2bce5968eb4097700b3e71e5322258d\": rpc error: code = NotFound desc = could not find container \"108cdc389c2cbb9baeb538856e70ea1ca2bce5968eb4097700b3e71e5322258d\": container with ID starting with 108cdc389c2cbb9baeb538856e70ea1ca2bce5968eb4097700b3e71e5322258d not found: ID does not exist" Jan 27 08:03:28 crc kubenswrapper[4787]: I0127 08:03:28.219998 4787 scope.go:117] "RemoveContainer" containerID="0bcc1a1d9f6e5bd0d8cf9b36441460debd839f92291531175eb05db5070136e8" Jan 27 08:03:28 crc kubenswrapper[4787]: I0127 08:03:28.220226 4787 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0bcc1a1d9f6e5bd0d8cf9b36441460debd839f92291531175eb05db5070136e8"} err="failed to get container status \"0bcc1a1d9f6e5bd0d8cf9b36441460debd839f92291531175eb05db5070136e8\": rpc error: code = NotFound desc = could not find container \"0bcc1a1d9f6e5bd0d8cf9b36441460debd839f92291531175eb05db5070136e8\": container with ID starting with 0bcc1a1d9f6e5bd0d8cf9b36441460debd839f92291531175eb05db5070136e8 not found: ID does not exist" Jan 27 08:03:28 crc kubenswrapper[4787]: I0127 08:03:28.220239 4787 scope.go:117] "RemoveContainer" containerID="2abb29a77d0081b70495701e68f0a5a9b80656eab59ae8de10fab1450a1df49f" Jan 27 08:03:28 crc kubenswrapper[4787]: I0127 08:03:28.220514 4787 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2abb29a77d0081b70495701e68f0a5a9b80656eab59ae8de10fab1450a1df49f"} err="failed to get container status \"2abb29a77d0081b70495701e68f0a5a9b80656eab59ae8de10fab1450a1df49f\": rpc error: code = NotFound desc = could not find container \"2abb29a77d0081b70495701e68f0a5a9b80656eab59ae8de10fab1450a1df49f\": container with ID starting with 2abb29a77d0081b70495701e68f0a5a9b80656eab59ae8de10fab1450a1df49f not found: ID does not exist" Jan 27 08:03:28 crc kubenswrapper[4787]: I0127 08:03:28.220526 4787 scope.go:117] "RemoveContainer" containerID="b5f54aa358ddf7d9fe76fd45c4e3332ba2ef1fa4da77c6f38ae29209c4339a80" Jan 27 08:03:28 crc kubenswrapper[4787]: I0127 08:03:28.220768 4787 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b5f54aa358ddf7d9fe76fd45c4e3332ba2ef1fa4da77c6f38ae29209c4339a80"} err="failed to get container status \"b5f54aa358ddf7d9fe76fd45c4e3332ba2ef1fa4da77c6f38ae29209c4339a80\": rpc error: code = NotFound desc = could not find container \"b5f54aa358ddf7d9fe76fd45c4e3332ba2ef1fa4da77c6f38ae29209c4339a80\": container with ID starting with b5f54aa358ddf7d9fe76fd45c4e3332ba2ef1fa4da77c6f38ae29209c4339a80 not found: ID does not exist" Jan 27 08:03:28 crc kubenswrapper[4787]: I0127 08:03:28.220790 4787 scope.go:117] "RemoveContainer" containerID="fbc588f712cb0167029fc80f924f52841fb904738bbcf774c53ca15a2642ef9c" Jan 27 08:03:28 crc kubenswrapper[4787]: I0127 08:03:28.221062 4787 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fbc588f712cb0167029fc80f924f52841fb904738bbcf774c53ca15a2642ef9c"} err="failed to get container status \"fbc588f712cb0167029fc80f924f52841fb904738bbcf774c53ca15a2642ef9c\": rpc error: code = NotFound desc = could not find container \"fbc588f712cb0167029fc80f924f52841fb904738bbcf774c53ca15a2642ef9c\": container with ID starting with fbc588f712cb0167029fc80f924f52841fb904738bbcf774c53ca15a2642ef9c not found: ID does not exist" Jan 27 08:03:28 crc kubenswrapper[4787]: I0127 08:03:28.221089 4787 scope.go:117] "RemoveContainer" 
containerID="fc1a27dd4120a4eafd6bfbd908f9fdbc80e9b006afafb509399f2b657b129b2c" Jan 27 08:03:28 crc kubenswrapper[4787]: I0127 08:03:28.221376 4787 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fc1a27dd4120a4eafd6bfbd908f9fdbc80e9b006afafb509399f2b657b129b2c"} err="failed to get container status \"fc1a27dd4120a4eafd6bfbd908f9fdbc80e9b006afafb509399f2b657b129b2c\": rpc error: code = NotFound desc = could not find container \"fc1a27dd4120a4eafd6bfbd908f9fdbc80e9b006afafb509399f2b657b129b2c\": container with ID starting with fc1a27dd4120a4eafd6bfbd908f9fdbc80e9b006afafb509399f2b657b129b2c not found: ID does not exist" Jan 27 08:03:28 crc kubenswrapper[4787]: I0127 08:03:28.221401 4787 scope.go:117] "RemoveContainer" containerID="199ad62dee1b7ba9f8af9b2f3fcd77db94e8ba7dbfe2d011610c47358c17126b" Jan 27 08:03:28 crc kubenswrapper[4787]: I0127 08:03:28.221687 4787 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"199ad62dee1b7ba9f8af9b2f3fcd77db94e8ba7dbfe2d011610c47358c17126b"} err="failed to get container status \"199ad62dee1b7ba9f8af9b2f3fcd77db94e8ba7dbfe2d011610c47358c17126b\": rpc error: code = NotFound desc = could not find container \"199ad62dee1b7ba9f8af9b2f3fcd77db94e8ba7dbfe2d011610c47358c17126b\": container with ID starting with 199ad62dee1b7ba9f8af9b2f3fcd77db94e8ba7dbfe2d011610c47358c17126b not found: ID does not exist" Jan 27 08:03:28 crc kubenswrapper[4787]: I0127 08:03:28.891922 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-rqjpz_e6f78168-0b0d-464d-b1c7-00bb9a69c0d1/kube-multus/2.log" Jan 27 08:03:28 crc kubenswrapper[4787]: I0127 08:03:28.895460 4787 generic.go:334] "Generic (PLEG): container finished" podID="053b65b3-b8e5-490f-81b8-63a5beea5f16" containerID="e0ba16c2e0ac88fa85c500b2cf38fd301e21168397734a8c121566dbd868ae48" exitCode=0 Jan 27 08:03:28 crc kubenswrapper[4787]: I0127 08:03:28.895618 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-x2w6d" event={"ID":"053b65b3-b8e5-490f-81b8-63a5beea5f16","Type":"ContainerDied","Data":"e0ba16c2e0ac88fa85c500b2cf38fd301e21168397734a8c121566dbd868ae48"} Jan 27 08:03:28 crc kubenswrapper[4787]: I0127 08:03:28.895729 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-x2w6d" event={"ID":"053b65b3-b8e5-490f-81b8-63a5beea5f16","Type":"ContainerStarted","Data":"cbc85ce561f2b9cb45c3f65310a13d497be58ceb62f9eb96fd12a7bbaebac168"} Jan 27 08:03:29 crc kubenswrapper[4787]: I0127 08:03:29.086950 4787 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fa44405c-042c-485a-ab6c-912dcd377751" path="/var/lib/kubelet/pods/fa44405c-042c-485a-ab6c-912dcd377751/volumes" Jan 27 08:03:29 crc kubenswrapper[4787]: I0127 08:03:29.911856 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-x2w6d" event={"ID":"053b65b3-b8e5-490f-81b8-63a5beea5f16","Type":"ContainerStarted","Data":"2e9bf31768b6d1610948796500dce93498648bdc60d8e6a3f4d9a468a3987a39"} Jan 27 08:03:29 crc kubenswrapper[4787]: I0127 08:03:29.912361 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-x2w6d" event={"ID":"053b65b3-b8e5-490f-81b8-63a5beea5f16","Type":"ContainerStarted","Data":"2cd2c4a4cdcfc04d19871f2c4d6dfc8d172fed42cf55d86e3f9ccf998253c2bc"} Jan 27 08:03:29 crc kubenswrapper[4787]: I0127 08:03:29.912374 4787 kubelet.go:2453] "SyncLoop (PLEG): event for 
pod" pod="openshift-ovn-kubernetes/ovnkube-node-x2w6d" event={"ID":"053b65b3-b8e5-490f-81b8-63a5beea5f16","Type":"ContainerStarted","Data":"37a6d92ab73ded43196df28c8f636311ce3efb1b0983636aa31fd083313bd0e8"} Jan 27 08:03:29 crc kubenswrapper[4787]: I0127 08:03:29.912385 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-x2w6d" event={"ID":"053b65b3-b8e5-490f-81b8-63a5beea5f16","Type":"ContainerStarted","Data":"f558b98ac626f89919ff17dbf3313cd882c2f0d96d0ce4142944d7ce6a8d3a93"} Jan 27 08:03:29 crc kubenswrapper[4787]: I0127 08:03:29.912394 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-x2w6d" event={"ID":"053b65b3-b8e5-490f-81b8-63a5beea5f16","Type":"ContainerStarted","Data":"d6b4206fbe98fe487eb1c3c6c665d519fec4b9712ad785f18fe46911a460ef31"} Jan 27 08:03:29 crc kubenswrapper[4787]: I0127 08:03:29.912405 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-x2w6d" event={"ID":"053b65b3-b8e5-490f-81b8-63a5beea5f16","Type":"ContainerStarted","Data":"f0c0062e46e797c498bec60c4d2132cbfe34e77fa676fe066449be0713fceba2"} Jan 27 08:03:30 crc kubenswrapper[4787]: I0127 08:03:30.920112 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-operator-646758c888-zxwfx" event={"ID":"d9fe5568-1516-4ee4-8ca1-90f4ea58ba3e","Type":"ContainerStarted","Data":"271910d8ad8ae48a5d448b6da8d2f4612a930641077784cbd1093e19461b9e60"} Jan 27 08:03:30 crc kubenswrapper[4787]: I0127 08:03:30.936395 4787 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-operator-646758c888-zxwfx" podStartSLOduration=1.7790120219999999 podStartE2EDuration="4.936377034s" podCreationTimestamp="2026-01-27 08:03:26 +0000 UTC" firstStartedPulling="2026-01-27 08:03:27.37119966 +0000 UTC m=+713.023555152" lastFinishedPulling="2026-01-27 08:03:30.528564672 +0000 UTC m=+716.180920164" observedRunningTime="2026-01-27 08:03:30.935620134 +0000 UTC m=+716.587975646" watchObservedRunningTime="2026-01-27 08:03:30.936377034 +0000 UTC m=+716.588732526" Jan 27 08:03:31 crc kubenswrapper[4787]: I0127 08:03:31.902674 4787 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-metrics-54757c584b-b8zkq"] Jan 27 08:03:31 crc kubenswrapper[4787]: I0127 08:03:31.904332 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-metrics-54757c584b-b8zkq" Jan 27 08:03:31 crc kubenswrapper[4787]: I0127 08:03:31.906613 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"nmstate-handler-dockercfg-lnx49" Jan 27 08:03:31 crc kubenswrapper[4787]: I0127 08:03:31.921306 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pddbj\" (UniqueName: \"kubernetes.io/projected/86b956d1-8553-4624-8324-f0a65d627e10-kube-api-access-pddbj\") pod \"nmstate-metrics-54757c584b-b8zkq\" (UID: \"86b956d1-8553-4624-8324-f0a65d627e10\") " pod="openshift-nmstate/nmstate-metrics-54757c584b-b8zkq" Jan 27 08:03:31 crc kubenswrapper[4787]: I0127 08:03:31.965253 4787 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-webhook-8474b5b9d8-n6mzk"] Jan 27 08:03:31 crc kubenswrapper[4787]: I0127 08:03:31.966681 4787 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-n6mzk" Jan 27 08:03:31 crc kubenswrapper[4787]: I0127 08:03:31.971026 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"openshift-nmstate-webhook" Jan 27 08:03:31 crc kubenswrapper[4787]: I0127 08:03:31.988316 4787 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-handler-fjcwr"] Jan 27 08:03:31 crc kubenswrapper[4787]: I0127 08:03:31.989295 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-handler-fjcwr" Jan 27 08:03:32 crc kubenswrapper[4787]: I0127 08:03:32.022807 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pddbj\" (UniqueName: \"kubernetes.io/projected/86b956d1-8553-4624-8324-f0a65d627e10-kube-api-access-pddbj\") pod \"nmstate-metrics-54757c584b-b8zkq\" (UID: \"86b956d1-8553-4624-8324-f0a65d627e10\") " pod="openshift-nmstate/nmstate-metrics-54757c584b-b8zkq" Jan 27 08:03:32 crc kubenswrapper[4787]: I0127 08:03:32.067845 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pddbj\" (UniqueName: \"kubernetes.io/projected/86b956d1-8553-4624-8324-f0a65d627e10-kube-api-access-pddbj\") pod \"nmstate-metrics-54757c584b-b8zkq\" (UID: \"86b956d1-8553-4624-8324-f0a65d627e10\") " pod="openshift-nmstate/nmstate-metrics-54757c584b-b8zkq" Jan 27 08:03:32 crc kubenswrapper[4787]: I0127 08:03:32.076975 4787 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-console-plugin-7754f76f8b-7qxzb"] Jan 27 08:03:32 crc kubenswrapper[4787]: I0127 08:03:32.077863 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-7qxzb" Jan 27 08:03:32 crc kubenswrapper[4787]: I0127 08:03:32.080125 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"nginx-conf" Jan 27 08:03:32 crc kubenswrapper[4787]: I0127 08:03:32.081984 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"plugin-serving-cert" Jan 27 08:03:32 crc kubenswrapper[4787]: I0127 08:03:32.082012 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"default-dockercfg-kllbj" Jan 27 08:03:32 crc kubenswrapper[4787]: I0127 08:03:32.124694 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/0131f9ae-88a6-41c3-8c76-6b2676d5e87b-dbus-socket\") pod \"nmstate-handler-fjcwr\" (UID: \"0131f9ae-88a6-41c3-8c76-6b2676d5e87b\") " pod="openshift-nmstate/nmstate-handler-fjcwr" Jan 27 08:03:32 crc kubenswrapper[4787]: I0127 08:03:32.125576 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fdjkk\" (UniqueName: \"kubernetes.io/projected/29bc137f-2121-451f-9684-2b4209b38c32-kube-api-access-fdjkk\") pod \"nmstate-webhook-8474b5b9d8-n6mzk\" (UID: \"29bc137f-2121-451f-9684-2b4209b38c32\") " pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-n6mzk" Jan 27 08:03:32 crc kubenswrapper[4787]: I0127 08:03:32.125639 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/0131f9ae-88a6-41c3-8c76-6b2676d5e87b-ovs-socket\") pod \"nmstate-handler-fjcwr\" (UID: \"0131f9ae-88a6-41c3-8c76-6b2676d5e87b\") " 
pod="openshift-nmstate/nmstate-handler-fjcwr" Jan 27 08:03:32 crc kubenswrapper[4787]: I0127 08:03:32.125676 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f99rx\" (UniqueName: \"kubernetes.io/projected/0131f9ae-88a6-41c3-8c76-6b2676d5e87b-kube-api-access-f99rx\") pod \"nmstate-handler-fjcwr\" (UID: \"0131f9ae-88a6-41c3-8c76-6b2676d5e87b\") " pod="openshift-nmstate/nmstate-handler-fjcwr" Jan 27 08:03:32 crc kubenswrapper[4787]: I0127 08:03:32.125780 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/0131f9ae-88a6-41c3-8c76-6b2676d5e87b-nmstate-lock\") pod \"nmstate-handler-fjcwr\" (UID: \"0131f9ae-88a6-41c3-8c76-6b2676d5e87b\") " pod="openshift-nmstate/nmstate-handler-fjcwr" Jan 27 08:03:32 crc kubenswrapper[4787]: I0127 08:03:32.125807 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/29bc137f-2121-451f-9684-2b4209b38c32-tls-key-pair\") pod \"nmstate-webhook-8474b5b9d8-n6mzk\" (UID: \"29bc137f-2121-451f-9684-2b4209b38c32\") " pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-n6mzk" Jan 27 08:03:32 crc kubenswrapper[4787]: I0127 08:03:32.222161 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-metrics-54757c584b-b8zkq" Jan 27 08:03:32 crc kubenswrapper[4787]: I0127 08:03:32.226832 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/51c3dc19-29a5-45ab-a384-fb7be3aa3f53-plugin-serving-cert\") pod \"nmstate-console-plugin-7754f76f8b-7qxzb\" (UID: \"51c3dc19-29a5-45ab-a384-fb7be3aa3f53\") " pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-7qxzb" Jan 27 08:03:32 crc kubenswrapper[4787]: I0127 08:03:32.226903 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f99rx\" (UniqueName: \"kubernetes.io/projected/0131f9ae-88a6-41c3-8c76-6b2676d5e87b-kube-api-access-f99rx\") pod \"nmstate-handler-fjcwr\" (UID: \"0131f9ae-88a6-41c3-8c76-6b2676d5e87b\") " pod="openshift-nmstate/nmstate-handler-fjcwr" Jan 27 08:03:32 crc kubenswrapper[4787]: I0127 08:03:32.226948 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gnzzk\" (UniqueName: \"kubernetes.io/projected/51c3dc19-29a5-45ab-a384-fb7be3aa3f53-kube-api-access-gnzzk\") pod \"nmstate-console-plugin-7754f76f8b-7qxzb\" (UID: \"51c3dc19-29a5-45ab-a384-fb7be3aa3f53\") " pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-7qxzb" Jan 27 08:03:32 crc kubenswrapper[4787]: I0127 08:03:32.227010 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/0131f9ae-88a6-41c3-8c76-6b2676d5e87b-nmstate-lock\") pod \"nmstate-handler-fjcwr\" (UID: \"0131f9ae-88a6-41c3-8c76-6b2676d5e87b\") " pod="openshift-nmstate/nmstate-handler-fjcwr" Jan 27 08:03:32 crc kubenswrapper[4787]: I0127 08:03:32.227050 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/29bc137f-2121-451f-9684-2b4209b38c32-tls-key-pair\") pod \"nmstate-webhook-8474b5b9d8-n6mzk\" (UID: \"29bc137f-2121-451f-9684-2b4209b38c32\") " pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-n6mzk" Jan 27 
08:03:32 crc kubenswrapper[4787]: I0127 08:03:32.227087 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/51c3dc19-29a5-45ab-a384-fb7be3aa3f53-nginx-conf\") pod \"nmstate-console-plugin-7754f76f8b-7qxzb\" (UID: \"51c3dc19-29a5-45ab-a384-fb7be3aa3f53\") " pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-7qxzb" Jan 27 08:03:32 crc kubenswrapper[4787]: I0127 08:03:32.227117 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/0131f9ae-88a6-41c3-8c76-6b2676d5e87b-dbus-socket\") pod \"nmstate-handler-fjcwr\" (UID: \"0131f9ae-88a6-41c3-8c76-6b2676d5e87b\") " pod="openshift-nmstate/nmstate-handler-fjcwr" Jan 27 08:03:32 crc kubenswrapper[4787]: I0127 08:03:32.227159 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fdjkk\" (UniqueName: \"kubernetes.io/projected/29bc137f-2121-451f-9684-2b4209b38c32-kube-api-access-fdjkk\") pod \"nmstate-webhook-8474b5b9d8-n6mzk\" (UID: \"29bc137f-2121-451f-9684-2b4209b38c32\") " pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-n6mzk" Jan 27 08:03:32 crc kubenswrapper[4787]: I0127 08:03:32.227193 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/0131f9ae-88a6-41c3-8c76-6b2676d5e87b-ovs-socket\") pod \"nmstate-handler-fjcwr\" (UID: \"0131f9ae-88a6-41c3-8c76-6b2676d5e87b\") " pod="openshift-nmstate/nmstate-handler-fjcwr" Jan 27 08:03:32 crc kubenswrapper[4787]: I0127 08:03:32.227282 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/0131f9ae-88a6-41c3-8c76-6b2676d5e87b-ovs-socket\") pod \"nmstate-handler-fjcwr\" (UID: \"0131f9ae-88a6-41c3-8c76-6b2676d5e87b\") " pod="openshift-nmstate/nmstate-handler-fjcwr" Jan 27 08:03:32 crc kubenswrapper[4787]: I0127 08:03:32.227750 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/0131f9ae-88a6-41c3-8c76-6b2676d5e87b-dbus-socket\") pod \"nmstate-handler-fjcwr\" (UID: \"0131f9ae-88a6-41c3-8c76-6b2676d5e87b\") " pod="openshift-nmstate/nmstate-handler-fjcwr" Jan 27 08:03:32 crc kubenswrapper[4787]: I0127 08:03:32.227980 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/0131f9ae-88a6-41c3-8c76-6b2676d5e87b-nmstate-lock\") pod \"nmstate-handler-fjcwr\" (UID: \"0131f9ae-88a6-41c3-8c76-6b2676d5e87b\") " pod="openshift-nmstate/nmstate-handler-fjcwr" Jan 27 08:03:32 crc kubenswrapper[4787]: I0127 08:03:32.234249 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/29bc137f-2121-451f-9684-2b4209b38c32-tls-key-pair\") pod \"nmstate-webhook-8474b5b9d8-n6mzk\" (UID: \"29bc137f-2121-451f-9684-2b4209b38c32\") " pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-n6mzk" Jan 27 08:03:32 crc kubenswrapper[4787]: I0127 08:03:32.248442 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f99rx\" (UniqueName: \"kubernetes.io/projected/0131f9ae-88a6-41c3-8c76-6b2676d5e87b-kube-api-access-f99rx\") pod \"nmstate-handler-fjcwr\" (UID: \"0131f9ae-88a6-41c3-8c76-6b2676d5e87b\") " pod="openshift-nmstate/nmstate-handler-fjcwr" Jan 27 08:03:32 crc kubenswrapper[4787]: I0127 08:03:32.257121 4787 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fdjkk\" (UniqueName: \"kubernetes.io/projected/29bc137f-2121-451f-9684-2b4209b38c32-kube-api-access-fdjkk\") pod \"nmstate-webhook-8474b5b9d8-n6mzk\" (UID: \"29bc137f-2121-451f-9684-2b4209b38c32\") " pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-n6mzk" Jan 27 08:03:32 crc kubenswrapper[4787]: I0127 08:03:32.259537 4787 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-6956947bc8-fxxct"] Jan 27 08:03:32 crc kubenswrapper[4787]: I0127 08:03:32.260345 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-6956947bc8-fxxct" Jan 27 08:03:32 crc kubenswrapper[4787]: E0127 08:03:32.266281 4787 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_nmstate-metrics-54757c584b-b8zkq_openshift-nmstate_86b956d1-8553-4624-8324-f0a65d627e10_0(5decd527f71143284767151d9bc5ba44f95628c1b2ff00f96f7d6db9a0fce2d6): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Jan 27 08:03:32 crc kubenswrapper[4787]: E0127 08:03:32.266360 4787 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_nmstate-metrics-54757c584b-b8zkq_openshift-nmstate_86b956d1-8553-4624-8324-f0a65d627e10_0(5decd527f71143284767151d9bc5ba44f95628c1b2ff00f96f7d6db9a0fce2d6): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-nmstate/nmstate-metrics-54757c584b-b8zkq" Jan 27 08:03:32 crc kubenswrapper[4787]: E0127 08:03:32.266382 4787 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_nmstate-metrics-54757c584b-b8zkq_openshift-nmstate_86b956d1-8553-4624-8324-f0a65d627e10_0(5decd527f71143284767151d9bc5ba44f95628c1b2ff00f96f7d6db9a0fce2d6): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-nmstate/nmstate-metrics-54757c584b-b8zkq" Jan 27 08:03:32 crc kubenswrapper[4787]: E0127 08:03:32.266433 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"nmstate-metrics-54757c584b-b8zkq_openshift-nmstate(86b956d1-8553-4624-8324-f0a65d627e10)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"nmstate-metrics-54757c584b-b8zkq_openshift-nmstate(86b956d1-8553-4624-8324-f0a65d627e10)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_nmstate-metrics-54757c584b-b8zkq_openshift-nmstate_86b956d1-8553-4624-8324-f0a65d627e10_0(5decd527f71143284767151d9bc5ba44f95628c1b2ff00f96f7d6db9a0fce2d6): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\"" pod="openshift-nmstate/nmstate-metrics-54757c584b-b8zkq" podUID="86b956d1-8553-4624-8324-f0a65d627e10" Jan 27 08:03:32 crc kubenswrapper[4787]: I0127 08:03:32.298105 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-n6mzk" Jan 27 08:03:32 crc kubenswrapper[4787]: I0127 08:03:32.309045 4787 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-handler-fjcwr" Jan 27 08:03:32 crc kubenswrapper[4787]: I0127 08:03:32.328096 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/51c3dc19-29a5-45ab-a384-fb7be3aa3f53-nginx-conf\") pod \"nmstate-console-plugin-7754f76f8b-7qxzb\" (UID: \"51c3dc19-29a5-45ab-a384-fb7be3aa3f53\") " pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-7qxzb" Jan 27 08:03:32 crc kubenswrapper[4787]: I0127 08:03:32.328190 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/51c3dc19-29a5-45ab-a384-fb7be3aa3f53-plugin-serving-cert\") pod \"nmstate-console-plugin-7754f76f8b-7qxzb\" (UID: \"51c3dc19-29a5-45ab-a384-fb7be3aa3f53\") " pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-7qxzb" Jan 27 08:03:32 crc kubenswrapper[4787]: I0127 08:03:32.328224 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gnzzk\" (UniqueName: \"kubernetes.io/projected/51c3dc19-29a5-45ab-a384-fb7be3aa3f53-kube-api-access-gnzzk\") pod \"nmstate-console-plugin-7754f76f8b-7qxzb\" (UID: \"51c3dc19-29a5-45ab-a384-fb7be3aa3f53\") " pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-7qxzb" Jan 27 08:03:32 crc kubenswrapper[4787]: E0127 08:03:32.328440 4787 secret.go:188] Couldn't get secret openshift-nmstate/plugin-serving-cert: secret "plugin-serving-cert" not found Jan 27 08:03:32 crc kubenswrapper[4787]: E0127 08:03:32.328543 4787 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/51c3dc19-29a5-45ab-a384-fb7be3aa3f53-plugin-serving-cert podName:51c3dc19-29a5-45ab-a384-fb7be3aa3f53 nodeName:}" failed. No retries permitted until 2026-01-27 08:03:32.8285203 +0000 UTC m=+718.480875792 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "plugin-serving-cert" (UniqueName: "kubernetes.io/secret/51c3dc19-29a5-45ab-a384-fb7be3aa3f53-plugin-serving-cert") pod "nmstate-console-plugin-7754f76f8b-7qxzb" (UID: "51c3dc19-29a5-45ab-a384-fb7be3aa3f53") : secret "plugin-serving-cert" not found Jan 27 08:03:32 crc kubenswrapper[4787]: I0127 08:03:32.329121 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/51c3dc19-29a5-45ab-a384-fb7be3aa3f53-nginx-conf\") pod \"nmstate-console-plugin-7754f76f8b-7qxzb\" (UID: \"51c3dc19-29a5-45ab-a384-fb7be3aa3f53\") " pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-7qxzb" Jan 27 08:03:32 crc kubenswrapper[4787]: E0127 08:03:32.336373 4787 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_nmstate-webhook-8474b5b9d8-n6mzk_openshift-nmstate_29bc137f-2121-451f-9684-2b4209b38c32_0(c472175cccd40f44f355bbc14fd0278bf140ecf48090f128c219aa14005ad7f8): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Jan 27 08:03:32 crc kubenswrapper[4787]: E0127 08:03:32.336444 4787 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_nmstate-webhook-8474b5b9d8-n6mzk_openshift-nmstate_29bc137f-2121-451f-9684-2b4209b38c32_0(c472175cccd40f44f355bbc14fd0278bf140ecf48090f128c219aa14005ad7f8): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-n6mzk" Jan 27 08:03:32 crc kubenswrapper[4787]: E0127 08:03:32.336467 4787 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_nmstate-webhook-8474b5b9d8-n6mzk_openshift-nmstate_29bc137f-2121-451f-9684-2b4209b38c32_0(c472175cccd40f44f355bbc14fd0278bf140ecf48090f128c219aa14005ad7f8): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-n6mzk" Jan 27 08:03:32 crc kubenswrapper[4787]: E0127 08:03:32.336523 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"nmstate-webhook-8474b5b9d8-n6mzk_openshift-nmstate(29bc137f-2121-451f-9684-2b4209b38c32)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"nmstate-webhook-8474b5b9d8-n6mzk_openshift-nmstate(29bc137f-2121-451f-9684-2b4209b38c32)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_nmstate-webhook-8474b5b9d8-n6mzk_openshift-nmstate_29bc137f-2121-451f-9684-2b4209b38c32_0(c472175cccd40f44f355bbc14fd0278bf140ecf48090f128c219aa14005ad7f8): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\"" pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-n6mzk" podUID="29bc137f-2121-451f-9684-2b4209b38c32" Jan 27 08:03:32 crc kubenswrapper[4787]: I0127 08:03:32.344705 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gnzzk\" (UniqueName: \"kubernetes.io/projected/51c3dc19-29a5-45ab-a384-fb7be3aa3f53-kube-api-access-gnzzk\") pod \"nmstate-console-plugin-7754f76f8b-7qxzb\" (UID: \"51c3dc19-29a5-45ab-a384-fb7be3aa3f53\") " pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-7qxzb" Jan 27 08:03:32 crc kubenswrapper[4787]: W0127 08:03:32.348630 4787 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0131f9ae_88a6_41c3_8c76_6b2676d5e87b.slice/crio-75dab7d4d875e1aff23c44820d9f34431de6bd53b4e8cb58fd290f9fc387faf9 WatchSource:0}: Error finding container 75dab7d4d875e1aff23c44820d9f34431de6bd53b4e8cb58fd290f9fc387faf9: Status 404 returned error can't find the container with id 75dab7d4d875e1aff23c44820d9f34431de6bd53b4e8cb58fd290f9fc387faf9 Jan 27 08:03:32 crc kubenswrapper[4787]: I0127 08:03:32.430156 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6b8a422e-b84a-431f-8634-ca1a64aec04e-trusted-ca-bundle\") pod \"console-6956947bc8-fxxct\" (UID: \"6b8a422e-b84a-431f-8634-ca1a64aec04e\") " pod="openshift-console/console-6956947bc8-fxxct" Jan 27 08:03:32 crc kubenswrapper[4787]: I0127 08:03:32.430244 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jq4kw\" (UniqueName: \"kubernetes.io/projected/6b8a422e-b84a-431f-8634-ca1a64aec04e-kube-api-access-jq4kw\") pod \"console-6956947bc8-fxxct\" (UID: \"6b8a422e-b84a-431f-8634-ca1a64aec04e\") " pod="openshift-console/console-6956947bc8-fxxct" Jan 27 08:03:32 crc kubenswrapper[4787]: I0127 08:03:32.430330 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/6b8a422e-b84a-431f-8634-ca1a64aec04e-console-config\") pod \"console-6956947bc8-fxxct\" (UID: 
\"6b8a422e-b84a-431f-8634-ca1a64aec04e\") " pod="openshift-console/console-6956947bc8-fxxct" Jan 27 08:03:32 crc kubenswrapper[4787]: I0127 08:03:32.430594 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/6b8a422e-b84a-431f-8634-ca1a64aec04e-oauth-serving-cert\") pod \"console-6956947bc8-fxxct\" (UID: \"6b8a422e-b84a-431f-8634-ca1a64aec04e\") " pod="openshift-console/console-6956947bc8-fxxct" Jan 27 08:03:32 crc kubenswrapper[4787]: I0127 08:03:32.430666 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/6b8a422e-b84a-431f-8634-ca1a64aec04e-service-ca\") pod \"console-6956947bc8-fxxct\" (UID: \"6b8a422e-b84a-431f-8634-ca1a64aec04e\") " pod="openshift-console/console-6956947bc8-fxxct" Jan 27 08:03:32 crc kubenswrapper[4787]: I0127 08:03:32.430713 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/6b8a422e-b84a-431f-8634-ca1a64aec04e-console-serving-cert\") pod \"console-6956947bc8-fxxct\" (UID: \"6b8a422e-b84a-431f-8634-ca1a64aec04e\") " pod="openshift-console/console-6956947bc8-fxxct" Jan 27 08:03:32 crc kubenswrapper[4787]: I0127 08:03:32.430748 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/6b8a422e-b84a-431f-8634-ca1a64aec04e-console-oauth-config\") pod \"console-6956947bc8-fxxct\" (UID: \"6b8a422e-b84a-431f-8634-ca1a64aec04e\") " pod="openshift-console/console-6956947bc8-fxxct" Jan 27 08:03:32 crc kubenswrapper[4787]: I0127 08:03:32.531630 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6b8a422e-b84a-431f-8634-ca1a64aec04e-trusted-ca-bundle\") pod \"console-6956947bc8-fxxct\" (UID: \"6b8a422e-b84a-431f-8634-ca1a64aec04e\") " pod="openshift-console/console-6956947bc8-fxxct" Jan 27 08:03:32 crc kubenswrapper[4787]: I0127 08:03:32.531726 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jq4kw\" (UniqueName: \"kubernetes.io/projected/6b8a422e-b84a-431f-8634-ca1a64aec04e-kube-api-access-jq4kw\") pod \"console-6956947bc8-fxxct\" (UID: \"6b8a422e-b84a-431f-8634-ca1a64aec04e\") " pod="openshift-console/console-6956947bc8-fxxct" Jan 27 08:03:32 crc kubenswrapper[4787]: I0127 08:03:32.531825 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/6b8a422e-b84a-431f-8634-ca1a64aec04e-console-config\") pod \"console-6956947bc8-fxxct\" (UID: \"6b8a422e-b84a-431f-8634-ca1a64aec04e\") " pod="openshift-console/console-6956947bc8-fxxct" Jan 27 08:03:32 crc kubenswrapper[4787]: I0127 08:03:32.531895 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/6b8a422e-b84a-431f-8634-ca1a64aec04e-oauth-serving-cert\") pod \"console-6956947bc8-fxxct\" (UID: \"6b8a422e-b84a-431f-8634-ca1a64aec04e\") " pod="openshift-console/console-6956947bc8-fxxct" Jan 27 08:03:32 crc kubenswrapper[4787]: I0127 08:03:32.531931 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: 
\"kubernetes.io/configmap/6b8a422e-b84a-431f-8634-ca1a64aec04e-service-ca\") pod \"console-6956947bc8-fxxct\" (UID: \"6b8a422e-b84a-431f-8634-ca1a64aec04e\") " pod="openshift-console/console-6956947bc8-fxxct" Jan 27 08:03:32 crc kubenswrapper[4787]: I0127 08:03:32.531970 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/6b8a422e-b84a-431f-8634-ca1a64aec04e-console-serving-cert\") pod \"console-6956947bc8-fxxct\" (UID: \"6b8a422e-b84a-431f-8634-ca1a64aec04e\") " pod="openshift-console/console-6956947bc8-fxxct" Jan 27 08:03:32 crc kubenswrapper[4787]: I0127 08:03:32.532188 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/6b8a422e-b84a-431f-8634-ca1a64aec04e-console-oauth-config\") pod \"console-6956947bc8-fxxct\" (UID: \"6b8a422e-b84a-431f-8634-ca1a64aec04e\") " pod="openshift-console/console-6956947bc8-fxxct" Jan 27 08:03:32 crc kubenswrapper[4787]: I0127 08:03:32.532937 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6b8a422e-b84a-431f-8634-ca1a64aec04e-trusted-ca-bundle\") pod \"console-6956947bc8-fxxct\" (UID: \"6b8a422e-b84a-431f-8634-ca1a64aec04e\") " pod="openshift-console/console-6956947bc8-fxxct" Jan 27 08:03:32 crc kubenswrapper[4787]: I0127 08:03:32.532996 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/6b8a422e-b84a-431f-8634-ca1a64aec04e-console-config\") pod \"console-6956947bc8-fxxct\" (UID: \"6b8a422e-b84a-431f-8634-ca1a64aec04e\") " pod="openshift-console/console-6956947bc8-fxxct" Jan 27 08:03:32 crc kubenswrapper[4787]: I0127 08:03:32.533395 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/6b8a422e-b84a-431f-8634-ca1a64aec04e-service-ca\") pod \"console-6956947bc8-fxxct\" (UID: \"6b8a422e-b84a-431f-8634-ca1a64aec04e\") " pod="openshift-console/console-6956947bc8-fxxct" Jan 27 08:03:32 crc kubenswrapper[4787]: I0127 08:03:32.533464 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/6b8a422e-b84a-431f-8634-ca1a64aec04e-oauth-serving-cert\") pod \"console-6956947bc8-fxxct\" (UID: \"6b8a422e-b84a-431f-8634-ca1a64aec04e\") " pod="openshift-console/console-6956947bc8-fxxct" Jan 27 08:03:32 crc kubenswrapper[4787]: I0127 08:03:32.536684 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/6b8a422e-b84a-431f-8634-ca1a64aec04e-console-serving-cert\") pod \"console-6956947bc8-fxxct\" (UID: \"6b8a422e-b84a-431f-8634-ca1a64aec04e\") " pod="openshift-console/console-6956947bc8-fxxct" Jan 27 08:03:32 crc kubenswrapper[4787]: I0127 08:03:32.537258 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/6b8a422e-b84a-431f-8634-ca1a64aec04e-console-oauth-config\") pod \"console-6956947bc8-fxxct\" (UID: \"6b8a422e-b84a-431f-8634-ca1a64aec04e\") " pod="openshift-console/console-6956947bc8-fxxct" Jan 27 08:03:32 crc kubenswrapper[4787]: I0127 08:03:32.550962 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jq4kw\" (UniqueName: 
\"kubernetes.io/projected/6b8a422e-b84a-431f-8634-ca1a64aec04e-kube-api-access-jq4kw\") pod \"console-6956947bc8-fxxct\" (UID: \"6b8a422e-b84a-431f-8634-ca1a64aec04e\") " pod="openshift-console/console-6956947bc8-fxxct" Jan 27 08:03:32 crc kubenswrapper[4787]: I0127 08:03:32.605425 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-6956947bc8-fxxct" Jan 27 08:03:32 crc kubenswrapper[4787]: E0127 08:03:32.629821 4787 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_console-6956947bc8-fxxct_openshift-console_6b8a422e-b84a-431f-8634-ca1a64aec04e_0(99e6b16e01944593ea407852dae4bdf79ab809e5b4a9bed84bdaa5d51c6bbb31): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Jan 27 08:03:32 crc kubenswrapper[4787]: E0127 08:03:32.629935 4787 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_console-6956947bc8-fxxct_openshift-console_6b8a422e-b84a-431f-8634-ca1a64aec04e_0(99e6b16e01944593ea407852dae4bdf79ab809e5b4a9bed84bdaa5d51c6bbb31): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-console/console-6956947bc8-fxxct" Jan 27 08:03:32 crc kubenswrapper[4787]: E0127 08:03:32.629956 4787 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_console-6956947bc8-fxxct_openshift-console_6b8a422e-b84a-431f-8634-ca1a64aec04e_0(99e6b16e01944593ea407852dae4bdf79ab809e5b4a9bed84bdaa5d51c6bbb31): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-console/console-6956947bc8-fxxct" Jan 27 08:03:32 crc kubenswrapper[4787]: E0127 08:03:32.630013 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"console-6956947bc8-fxxct_openshift-console(6b8a422e-b84a-431f-8634-ca1a64aec04e)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"console-6956947bc8-fxxct_openshift-console(6b8a422e-b84a-431f-8634-ca1a64aec04e)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_console-6956947bc8-fxxct_openshift-console_6b8a422e-b84a-431f-8634-ca1a64aec04e_0(99e6b16e01944593ea407852dae4bdf79ab809e5b4a9bed84bdaa5d51c6bbb31): no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\"" pod="openshift-console/console-6956947bc8-fxxct" podUID="6b8a422e-b84a-431f-8634-ca1a64aec04e" Jan 27 08:03:32 crc kubenswrapper[4787]: I0127 08:03:32.835949 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/51c3dc19-29a5-45ab-a384-fb7be3aa3f53-plugin-serving-cert\") pod \"nmstate-console-plugin-7754f76f8b-7qxzb\" (UID: \"51c3dc19-29a5-45ab-a384-fb7be3aa3f53\") " pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-7qxzb" Jan 27 08:03:32 crc kubenswrapper[4787]: I0127 08:03:32.840609 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/51c3dc19-29a5-45ab-a384-fb7be3aa3f53-plugin-serving-cert\") pod \"nmstate-console-plugin-7754f76f8b-7qxzb\" (UID: \"51c3dc19-29a5-45ab-a384-fb7be3aa3f53\") " pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-7qxzb" Jan 27 08:03:32 crc kubenswrapper[4787]: I0127 08:03:32.939896 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-handler-fjcwr" event={"ID":"0131f9ae-88a6-41c3-8c76-6b2676d5e87b","Type":"ContainerStarted","Data":"75dab7d4d875e1aff23c44820d9f34431de6bd53b4e8cb58fd290f9fc387faf9"} Jan 27 08:03:32 crc kubenswrapper[4787]: I0127 08:03:32.946215 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-x2w6d" event={"ID":"053b65b3-b8e5-490f-81b8-63a5beea5f16","Type":"ContainerStarted","Data":"8c7bde2bb5dbf385ed55f577accc9af0aaf39618dcc77eaed6d37c6574946364"} Jan 27 08:03:32 crc kubenswrapper[4787]: I0127 08:03:32.995190 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-7qxzb" Jan 27 08:03:33 crc kubenswrapper[4787]: E0127 08:03:33.038686 4787 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_nmstate-console-plugin-7754f76f8b-7qxzb_openshift-nmstate_51c3dc19-29a5-45ab-a384-fb7be3aa3f53_0(4081e46c154b883502413798d0b5a5b2aabfdfbaa6ec0d2541ef3f74414c93e0): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Jan 27 08:03:33 crc kubenswrapper[4787]: E0127 08:03:33.038772 4787 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_nmstate-console-plugin-7754f76f8b-7qxzb_openshift-nmstate_51c3dc19-29a5-45ab-a384-fb7be3aa3f53_0(4081e46c154b883502413798d0b5a5b2aabfdfbaa6ec0d2541ef3f74414c93e0): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-7qxzb" Jan 27 08:03:33 crc kubenswrapper[4787]: E0127 08:03:33.038807 4787 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_nmstate-console-plugin-7754f76f8b-7qxzb_openshift-nmstate_51c3dc19-29a5-45ab-a384-fb7be3aa3f53_0(4081e46c154b883502413798d0b5a5b2aabfdfbaa6ec0d2541ef3f74414c93e0): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-7qxzb" Jan 27 08:03:33 crc kubenswrapper[4787]: E0127 08:03:33.038864 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"nmstate-console-plugin-7754f76f8b-7qxzb_openshift-nmstate(51c3dc19-29a5-45ab-a384-fb7be3aa3f53)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"nmstate-console-plugin-7754f76f8b-7qxzb_openshift-nmstate(51c3dc19-29a5-45ab-a384-fb7be3aa3f53)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_nmstate-console-plugin-7754f76f8b-7qxzb_openshift-nmstate_51c3dc19-29a5-45ab-a384-fb7be3aa3f53_0(4081e46c154b883502413798d0b5a5b2aabfdfbaa6ec0d2541ef3f74414c93e0): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\"" pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-7qxzb" podUID="51c3dc19-29a5-45ab-a384-fb7be3aa3f53" Jan 27 08:03:34 crc kubenswrapper[4787]: I0127 08:03:34.964707 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-x2w6d" event={"ID":"053b65b3-b8e5-490f-81b8-63a5beea5f16","Type":"ContainerStarted","Data":"1d3eec5328e0d73621299a6eb958d1f5a82fbeb693a50277378e81014ee3e0f9"} Jan 27 08:03:34 crc kubenswrapper[4787]: I0127 08:03:34.965381 4787 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-x2w6d" Jan 27 08:03:34 crc kubenswrapper[4787]: I0127 08:03:34.998617 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-webhook-8474b5b9d8-n6mzk"] Jan 27 08:03:34 crc kubenswrapper[4787]: I0127 08:03:34.998764 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-n6mzk" Jan 27 08:03:35 crc kubenswrapper[4787]: I0127 08:03:35.000458 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-n6mzk" Jan 27 08:03:35 crc kubenswrapper[4787]: I0127 08:03:35.013416 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-console-plugin-7754f76f8b-7qxzb"] Jan 27 08:03:35 crc kubenswrapper[4787]: I0127 08:03:35.014981 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-7qxzb" Jan 27 08:03:35 crc kubenswrapper[4787]: I0127 08:03:35.015498 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-7qxzb" Jan 27 08:03:35 crc kubenswrapper[4787]: I0127 08:03:35.021405 4787 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-x2w6d" Jan 27 08:03:35 crc kubenswrapper[4787]: I0127 08:03:35.029890 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-6956947bc8-fxxct"] Jan 27 08:03:35 crc kubenswrapper[4787]: I0127 08:03:35.030256 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-6956947bc8-fxxct" Jan 27 08:03:35 crc kubenswrapper[4787]: I0127 08:03:35.030843 4787 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-6956947bc8-fxxct" Jan 27 08:03:35 crc kubenswrapper[4787]: I0127 08:03:35.035846 4787 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-x2w6d" podStartSLOduration=8.035799609 podStartE2EDuration="8.035799609s" podCreationTimestamp="2026-01-27 08:03:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-27 08:03:34.998200589 +0000 UTC m=+720.650556111" watchObservedRunningTime="2026-01-27 08:03:35.035799609 +0000 UTC m=+720.688155101" Jan 27 08:03:35 crc kubenswrapper[4787]: I0127 08:03:35.043521 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-metrics-54757c584b-b8zkq"] Jan 27 08:03:35 crc kubenswrapper[4787]: I0127 08:03:35.043707 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-metrics-54757c584b-b8zkq" Jan 27 08:03:35 crc kubenswrapper[4787]: I0127 08:03:35.044301 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-metrics-54757c584b-b8zkq" Jan 27 08:03:35 crc kubenswrapper[4787]: E0127 08:03:35.309181 4787 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_console-6956947bc8-fxxct_openshift-console_6b8a422e-b84a-431f-8634-ca1a64aec04e_0(4e9d42d231cc46ab2e837f99211814c4ddf95054b433ec4b38794a186fe5ac99): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Jan 27 08:03:35 crc kubenswrapper[4787]: E0127 08:03:35.309263 4787 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_console-6956947bc8-fxxct_openshift-console_6b8a422e-b84a-431f-8634-ca1a64aec04e_0(4e9d42d231cc46ab2e837f99211814c4ddf95054b433ec4b38794a186fe5ac99): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-console/console-6956947bc8-fxxct" Jan 27 08:03:35 crc kubenswrapper[4787]: E0127 08:03:35.309298 4787 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_console-6956947bc8-fxxct_openshift-console_6b8a422e-b84a-431f-8634-ca1a64aec04e_0(4e9d42d231cc46ab2e837f99211814c4ddf95054b433ec4b38794a186fe5ac99): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-console/console-6956947bc8-fxxct" Jan 27 08:03:35 crc kubenswrapper[4787]: E0127 08:03:35.309367 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"console-6956947bc8-fxxct_openshift-console(6b8a422e-b84a-431f-8634-ca1a64aec04e)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"console-6956947bc8-fxxct_openshift-console(6b8a422e-b84a-431f-8634-ca1a64aec04e)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_console-6956947bc8-fxxct_openshift-console_6b8a422e-b84a-431f-8634-ca1a64aec04e_0(4e9d42d231cc46ab2e837f99211814c4ddf95054b433ec4b38794a186fe5ac99): no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\"" pod="openshift-console/console-6956947bc8-fxxct" podUID="6b8a422e-b84a-431f-8634-ca1a64aec04e" Jan 27 08:03:35 crc kubenswrapper[4787]: E0127 08:03:35.336419 4787 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_nmstate-metrics-54757c584b-b8zkq_openshift-nmstate_86b956d1-8553-4624-8324-f0a65d627e10_0(e67cc5005b0e82742465bf968446985c56062bdd46fa09adffcf6fd31db91121): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Jan 27 08:03:35 crc kubenswrapper[4787]: E0127 08:03:35.336568 4787 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_nmstate-metrics-54757c584b-b8zkq_openshift-nmstate_86b956d1-8553-4624-8324-f0a65d627e10_0(e67cc5005b0e82742465bf968446985c56062bdd46fa09adffcf6fd31db91121): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-nmstate/nmstate-metrics-54757c584b-b8zkq" Jan 27 08:03:35 crc kubenswrapper[4787]: E0127 08:03:35.336597 4787 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_nmstate-metrics-54757c584b-b8zkq_openshift-nmstate_86b956d1-8553-4624-8324-f0a65d627e10_0(e67cc5005b0e82742465bf968446985c56062bdd46fa09adffcf6fd31db91121): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-nmstate/nmstate-metrics-54757c584b-b8zkq" Jan 27 08:03:35 crc kubenswrapper[4787]: E0127 08:03:35.336676 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"nmstate-metrics-54757c584b-b8zkq_openshift-nmstate(86b956d1-8553-4624-8324-f0a65d627e10)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"nmstate-metrics-54757c584b-b8zkq_openshift-nmstate(86b956d1-8553-4624-8324-f0a65d627e10)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_nmstate-metrics-54757c584b-b8zkq_openshift-nmstate_86b956d1-8553-4624-8324-f0a65d627e10_0(e67cc5005b0e82742465bf968446985c56062bdd46fa09adffcf6fd31db91121): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\"" pod="openshift-nmstate/nmstate-metrics-54757c584b-b8zkq" podUID="86b956d1-8553-4624-8324-f0a65d627e10" Jan 27 08:03:35 crc kubenswrapper[4787]: E0127 08:03:35.349303 4787 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_nmstate-webhook-8474b5b9d8-n6mzk_openshift-nmstate_29bc137f-2121-451f-9684-2b4209b38c32_0(15b2adfc2f8aa26a36c69cb87b40f0e404742ea78d0e57bf01d3c49d4bed4a5c): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Jan 27 08:03:35 crc kubenswrapper[4787]: E0127 08:03:35.349430 4787 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_nmstate-webhook-8474b5b9d8-n6mzk_openshift-nmstate_29bc137f-2121-451f-9684-2b4209b38c32_0(15b2adfc2f8aa26a36c69cb87b40f0e404742ea78d0e57bf01d3c49d4bed4a5c): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-n6mzk" Jan 27 08:03:35 crc kubenswrapper[4787]: E0127 08:03:35.349481 4787 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_nmstate-webhook-8474b5b9d8-n6mzk_openshift-nmstate_29bc137f-2121-451f-9684-2b4209b38c32_0(15b2adfc2f8aa26a36c69cb87b40f0e404742ea78d0e57bf01d3c49d4bed4a5c): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-n6mzk" Jan 27 08:03:35 crc kubenswrapper[4787]: E0127 08:03:35.349576 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"nmstate-webhook-8474b5b9d8-n6mzk_openshift-nmstate(29bc137f-2121-451f-9684-2b4209b38c32)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"nmstate-webhook-8474b5b9d8-n6mzk_openshift-nmstate(29bc137f-2121-451f-9684-2b4209b38c32)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_nmstate-webhook-8474b5b9d8-n6mzk_openshift-nmstate_29bc137f-2121-451f-9684-2b4209b38c32_0(15b2adfc2f8aa26a36c69cb87b40f0e404742ea78d0e57bf01d3c49d4bed4a5c): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\"" pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-n6mzk" podUID="29bc137f-2121-451f-9684-2b4209b38c32" Jan 27 08:03:35 crc kubenswrapper[4787]: E0127 08:03:35.380928 4787 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_nmstate-console-plugin-7754f76f8b-7qxzb_openshift-nmstate_51c3dc19-29a5-45ab-a384-fb7be3aa3f53_0(e5093bba388cc695658dd09347e47b93aecf62edae4e389da0eddd793ffbec51): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Jan 27 08:03:35 crc kubenswrapper[4787]: E0127 08:03:35.381040 4787 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_nmstate-console-plugin-7754f76f8b-7qxzb_openshift-nmstate_51c3dc19-29a5-45ab-a384-fb7be3aa3f53_0(e5093bba388cc695658dd09347e47b93aecf62edae4e389da0eddd793ffbec51): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-7qxzb" Jan 27 08:03:35 crc kubenswrapper[4787]: E0127 08:03:35.381076 4787 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_nmstate-console-plugin-7754f76f8b-7qxzb_openshift-nmstate_51c3dc19-29a5-45ab-a384-fb7be3aa3f53_0(e5093bba388cc695658dd09347e47b93aecf62edae4e389da0eddd793ffbec51): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-7qxzb" Jan 27 08:03:35 crc kubenswrapper[4787]: E0127 08:03:35.381148 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"nmstate-console-plugin-7754f76f8b-7qxzb_openshift-nmstate(51c3dc19-29a5-45ab-a384-fb7be3aa3f53)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"nmstate-console-plugin-7754f76f8b-7qxzb_openshift-nmstate(51c3dc19-29a5-45ab-a384-fb7be3aa3f53)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_nmstate-console-plugin-7754f76f8b-7qxzb_openshift-nmstate_51c3dc19-29a5-45ab-a384-fb7be3aa3f53_0(e5093bba388cc695658dd09347e47b93aecf62edae4e389da0eddd793ffbec51): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\"" pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-7qxzb" podUID="51c3dc19-29a5-45ab-a384-fb7be3aa3f53" Jan 27 08:03:35 crc kubenswrapper[4787]: I0127 08:03:35.974414 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-handler-fjcwr" event={"ID":"0131f9ae-88a6-41c3-8c76-6b2676d5e87b","Type":"ContainerStarted","Data":"d3ce78724e5290d4782214e377ebca4244ecca841925ab607a8ced4ee9f7ebbd"} Jan 27 08:03:35 crc kubenswrapper[4787]: I0127 08:03:35.974907 4787 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-x2w6d" Jan 27 08:03:35 crc kubenswrapper[4787]: I0127 08:03:35.974930 4787 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-x2w6d" Jan 27 08:03:35 crc kubenswrapper[4787]: I0127 08:03:35.974946 4787 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-nmstate/nmstate-handler-fjcwr" Jan 27 08:03:35 crc kubenswrapper[4787]: I0127 08:03:35.997055 4787 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-handler-fjcwr" podStartSLOduration=2.057954689 podStartE2EDuration="4.997031241s" podCreationTimestamp="2026-01-27 08:03:31 +0000 UTC" firstStartedPulling="2026-01-27 08:03:32.350991603 +0000 UTC m=+718.003347095" lastFinishedPulling="2026-01-27 08:03:35.290068155 +0000 UTC m=+720.942423647" observedRunningTime="2026-01-27 08:03:35.993684726 +0000 UTC m=+721.646040268" watchObservedRunningTime="2026-01-27 08:03:35.997031241 +0000 UTC m=+721.649386733" Jan 27 08:03:36 crc kubenswrapper[4787]: I0127 08:03:36.024107 4787 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-x2w6d" Jan 27 08:03:42 crc kubenswrapper[4787]: I0127 08:03:42.077905 4787 scope.go:117] "RemoveContainer" containerID="30eb607bd3c5a74648f4c24cbbf8118159296bb63fb30a76b991d8fdb94cb16a" Jan 27 08:03:42 crc kubenswrapper[4787]: E0127 08:03:42.078684 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 20s restarting failed container=kube-multus pod=multus-rqjpz_openshift-multus(e6f78168-0b0d-464d-b1c7-00bb9a69c0d1)\"" pod="openshift-multus/multus-rqjpz" podUID="e6f78168-0b0d-464d-b1c7-00bb9a69c0d1" Jan 27 08:03:42 crc kubenswrapper[4787]: I0127 08:03:42.338188 4787 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-nmstate/nmstate-handler-fjcwr" Jan 27 08:03:46 crc kubenswrapper[4787]: I0127 08:03:46.076117 4787 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-6956947bc8-fxxct" Jan 27 08:03:46 crc kubenswrapper[4787]: I0127 08:03:46.077061 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-6956947bc8-fxxct" Jan 27 08:03:46 crc kubenswrapper[4787]: E0127 08:03:46.141947 4787 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_console-6956947bc8-fxxct_openshift-console_6b8a422e-b84a-431f-8634-ca1a64aec04e_0(02d37f1dee94b0bb77886bf62146be76036f653f4e4fc86d46341898423acb52): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Jan 27 08:03:46 crc kubenswrapper[4787]: E0127 08:03:46.142076 4787 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_console-6956947bc8-fxxct_openshift-console_6b8a422e-b84a-431f-8634-ca1a64aec04e_0(02d37f1dee94b0bb77886bf62146be76036f653f4e4fc86d46341898423acb52): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-console/console-6956947bc8-fxxct" Jan 27 08:03:46 crc kubenswrapper[4787]: E0127 08:03:46.142117 4787 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_console-6956947bc8-fxxct_openshift-console_6b8a422e-b84a-431f-8634-ca1a64aec04e_0(02d37f1dee94b0bb77886bf62146be76036f653f4e4fc86d46341898423acb52): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-console/console-6956947bc8-fxxct" Jan 27 08:03:46 crc kubenswrapper[4787]: E0127 08:03:46.142200 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"console-6956947bc8-fxxct_openshift-console(6b8a422e-b84a-431f-8634-ca1a64aec04e)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"console-6956947bc8-fxxct_openshift-console(6b8a422e-b84a-431f-8634-ca1a64aec04e)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_console-6956947bc8-fxxct_openshift-console_6b8a422e-b84a-431f-8634-ca1a64aec04e_0(02d37f1dee94b0bb77886bf62146be76036f653f4e4fc86d46341898423acb52): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\"" pod="openshift-console/console-6956947bc8-fxxct" podUID="6b8a422e-b84a-431f-8634-ca1a64aec04e" Jan 27 08:03:49 crc kubenswrapper[4787]: I0127 08:03:49.075954 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-metrics-54757c584b-b8zkq" Jan 27 08:03:49 crc kubenswrapper[4787]: I0127 08:03:49.076762 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-metrics-54757c584b-b8zkq" Jan 27 08:03:49 crc kubenswrapper[4787]: E0127 08:03:49.127340 4787 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_nmstate-metrics-54757c584b-b8zkq_openshift-nmstate_86b956d1-8553-4624-8324-f0a65d627e10_0(9ea8beecff1acd76804bc8a934ae567d0534d456e387c14d372f849753bc3f25): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
Jan 27 08:03:49 crc kubenswrapper[4787]: E0127 08:03:49.128024 4787 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_nmstate-metrics-54757c584b-b8zkq_openshift-nmstate_86b956d1-8553-4624-8324-f0a65d627e10_0(9ea8beecff1acd76804bc8a934ae567d0534d456e387c14d372f849753bc3f25): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-nmstate/nmstate-metrics-54757c584b-b8zkq" Jan 27 08:03:49 crc kubenswrapper[4787]: E0127 08:03:49.128079 4787 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_nmstate-metrics-54757c584b-b8zkq_openshift-nmstate_86b956d1-8553-4624-8324-f0a65d627e10_0(9ea8beecff1acd76804bc8a934ae567d0534d456e387c14d372f849753bc3f25): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-nmstate/nmstate-metrics-54757c584b-b8zkq" Jan 27 08:03:49 crc kubenswrapper[4787]: E0127 08:03:49.128189 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"nmstate-metrics-54757c584b-b8zkq_openshift-nmstate(86b956d1-8553-4624-8324-f0a65d627e10)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"nmstate-metrics-54757c584b-b8zkq_openshift-nmstate(86b956d1-8553-4624-8324-f0a65d627e10)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_nmstate-metrics-54757c584b-b8zkq_openshift-nmstate_86b956d1-8553-4624-8324-f0a65d627e10_0(9ea8beecff1acd76804bc8a934ae567d0534d456e387c14d372f849753bc3f25): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\"" pod="openshift-nmstate/nmstate-metrics-54757c584b-b8zkq" podUID="86b956d1-8553-4624-8324-f0a65d627e10" Jan 27 08:03:50 crc kubenswrapper[4787]: I0127 08:03:50.075705 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-n6mzk" Jan 27 08:03:50 crc kubenswrapper[4787]: I0127 08:03:50.075793 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-7qxzb" Jan 27 08:03:50 crc kubenswrapper[4787]: I0127 08:03:50.076404 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-n6mzk" Jan 27 08:03:50 crc kubenswrapper[4787]: I0127 08:03:50.077188 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-7qxzb" Jan 27 08:03:50 crc kubenswrapper[4787]: E0127 08:03:50.148533 4787 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_nmstate-console-plugin-7754f76f8b-7qxzb_openshift-nmstate_51c3dc19-29a5-45ab-a384-fb7be3aa3f53_0(819a04c8f2c0cfc956ad67db26726f7c5d8cfb1d427ad1f5d10dee0733a5a37b): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
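The retry cadence in these entries reflects the kubelet's back-off handling: the failed plugin-serving-cert mount was retried after 500ms (the durationBeforeRetry reported earlier), the kube-multus container sat in CrashLoopBackOff with "back-off 20s", and the failing CreatePodSandbox attempts for the same pods recur at widening intervals (08:03:32, 08:03:35, 08:03:46-50, then around 08:04:03). The sketch below only illustrates that capped, roughly doubling back-off pattern; the cap and attempt count are assumptions for the illustration, not values taken from the kubelet.

// backoffsketch.go — illustrative capped exponential back-off, matching the
// pattern visible in the log rather than the kubelet's actual implementation.
package main

import (
	"fmt"
	"time"
)

// nextDelay doubles the current delay, clamped to max.
func nextDelay(current, max time.Duration) time.Duration {
	next := current * 2
	if next > max {
		return max
	}
	return next
}

func main() {
	delay := 500 * time.Millisecond // initial durationBeforeRetry reported for the plugin-serving-cert mount
	maxDelay := 2 * time.Minute     // assumed cap for the sketch
	for attempt := 1; attempt <= 8; attempt++ {
		fmt.Printf("attempt %d fails -> next retry no earlier than +%v\n", attempt, delay)
		delay = nextDelay(delay, maxDelay)
	}
}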
Jan 27 08:03:50 crc kubenswrapper[4787]: E0127 08:03:50.148653 4787 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_nmstate-console-plugin-7754f76f8b-7qxzb_openshift-nmstate_51c3dc19-29a5-45ab-a384-fb7be3aa3f53_0(819a04c8f2c0cfc956ad67db26726f7c5d8cfb1d427ad1f5d10dee0733a5a37b): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-7qxzb" Jan 27 08:03:50 crc kubenswrapper[4787]: E0127 08:03:50.148687 4787 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_nmstate-console-plugin-7754f76f8b-7qxzb_openshift-nmstate_51c3dc19-29a5-45ab-a384-fb7be3aa3f53_0(819a04c8f2c0cfc956ad67db26726f7c5d8cfb1d427ad1f5d10dee0733a5a37b): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-7qxzb" Jan 27 08:03:50 crc kubenswrapper[4787]: E0127 08:03:50.148896 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"nmstate-console-plugin-7754f76f8b-7qxzb_openshift-nmstate(51c3dc19-29a5-45ab-a384-fb7be3aa3f53)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"nmstate-console-plugin-7754f76f8b-7qxzb_openshift-nmstate(51c3dc19-29a5-45ab-a384-fb7be3aa3f53)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_nmstate-console-plugin-7754f76f8b-7qxzb_openshift-nmstate_51c3dc19-29a5-45ab-a384-fb7be3aa3f53_0(819a04c8f2c0cfc956ad67db26726f7c5d8cfb1d427ad1f5d10dee0733a5a37b): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\"" pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-7qxzb" podUID="51c3dc19-29a5-45ab-a384-fb7be3aa3f53" Jan 27 08:03:50 crc kubenswrapper[4787]: E0127 08:03:50.160364 4787 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_nmstate-webhook-8474b5b9d8-n6mzk_openshift-nmstate_29bc137f-2121-451f-9684-2b4209b38c32_0(484ac11b2a4c44529a76b4c5b543561ce33ddeb29eff28cb730166903109d282): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Jan 27 08:03:50 crc kubenswrapper[4787]: E0127 08:03:50.160421 4787 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_nmstate-webhook-8474b5b9d8-n6mzk_openshift-nmstate_29bc137f-2121-451f-9684-2b4209b38c32_0(484ac11b2a4c44529a76b4c5b543561ce33ddeb29eff28cb730166903109d282): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-n6mzk" Jan 27 08:03:50 crc kubenswrapper[4787]: E0127 08:03:50.160441 4787 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_nmstate-webhook-8474b5b9d8-n6mzk_openshift-nmstate_29bc137f-2121-451f-9684-2b4209b38c32_0(484ac11b2a4c44529a76b4c5b543561ce33ddeb29eff28cb730166903109d282): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-n6mzk" Jan 27 08:03:50 crc kubenswrapper[4787]: E0127 08:03:50.160492 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"nmstate-webhook-8474b5b9d8-n6mzk_openshift-nmstate(29bc137f-2121-451f-9684-2b4209b38c32)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"nmstate-webhook-8474b5b9d8-n6mzk_openshift-nmstate(29bc137f-2121-451f-9684-2b4209b38c32)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_nmstate-webhook-8474b5b9d8-n6mzk_openshift-nmstate_29bc137f-2121-451f-9684-2b4209b38c32_0(484ac11b2a4c44529a76b4c5b543561ce33ddeb29eff28cb730166903109d282): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\"" pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-n6mzk" podUID="29bc137f-2121-451f-9684-2b4209b38c32" Jan 27 08:03:53 crc kubenswrapper[4787]: I0127 08:03:53.077512 4787 scope.go:117] "RemoveContainer" containerID="30eb607bd3c5a74648f4c24cbbf8118159296bb63fb30a76b991d8fdb94cb16a" Jan 27 08:03:54 crc kubenswrapper[4787]: I0127 08:03:54.128614 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-rqjpz_e6f78168-0b0d-464d-b1c7-00bb9a69c0d1/kube-multus/2.log" Jan 27 08:03:54 crc kubenswrapper[4787]: I0127 08:03:54.129041 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-rqjpz" event={"ID":"e6f78168-0b0d-464d-b1c7-00bb9a69c0d1","Type":"ContainerStarted","Data":"c8fbdabbb3a550f439f21f48b97e5b5a91b35ed06ed9be0e3cf6a154da8b69ba"} Jan 27 08:03:58 crc kubenswrapper[4787]: I0127 08:03:58.137472 4787 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-x2w6d" Jan 27 08:03:59 crc kubenswrapper[4787]: I0127 08:03:59.076166 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-6956947bc8-fxxct" Jan 27 08:03:59 crc kubenswrapper[4787]: I0127 08:03:59.077103 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-6956947bc8-fxxct" Jan 27 08:04:00 crc kubenswrapper[4787]: I0127 08:04:00.321844 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-6956947bc8-fxxct"] Jan 27 08:04:01 crc kubenswrapper[4787]: I0127 08:04:01.303113 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-6956947bc8-fxxct" event={"ID":"6b8a422e-b84a-431f-8634-ca1a64aec04e","Type":"ContainerStarted","Data":"6cdefda9f2996e1bf97bae5d69fbcc9b0b7d4b35443e21e38d5ed3d70f2cac69"} Jan 27 08:04:01 crc kubenswrapper[4787]: I0127 08:04:01.304091 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-6956947bc8-fxxct" event={"ID":"6b8a422e-b84a-431f-8634-ca1a64aec04e","Type":"ContainerStarted","Data":"6a111a1aa74110e18f9e81fb8a5f192f42ffe5add94a1e5903bf971a7edc2435"} Jan 27 08:04:02 crc kubenswrapper[4787]: I0127 08:04:02.075895 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-metrics-54757c584b-b8zkq" Jan 27 08:04:02 crc kubenswrapper[4787]: I0127 08:04:02.076767 4787 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-metrics-54757c584b-b8zkq" Jan 27 08:04:02 crc kubenswrapper[4787]: I0127 08:04:02.327573 4787 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-6956947bc8-fxxct" podStartSLOduration=30.327529021 podStartE2EDuration="30.327529021s" podCreationTimestamp="2026-01-27 08:03:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-27 08:04:01.327002525 +0000 UTC m=+746.979358027" watchObservedRunningTime="2026-01-27 08:04:02.327529021 +0000 UTC m=+747.979884513" Jan 27 08:04:02 crc kubenswrapper[4787]: I0127 08:04:02.331334 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-metrics-54757c584b-b8zkq"] Jan 27 08:04:02 crc kubenswrapper[4787]: I0127 08:04:02.606187 4787 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-6956947bc8-fxxct" Jan 27 08:04:02 crc kubenswrapper[4787]: I0127 08:04:02.606259 4787 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-6956947bc8-fxxct" Jan 27 08:04:02 crc kubenswrapper[4787]: I0127 08:04:02.611947 4787 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-6956947bc8-fxxct" Jan 27 08:04:03 crc kubenswrapper[4787]: I0127 08:04:03.077489 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-n6mzk" Jan 27 08:04:03 crc kubenswrapper[4787]: I0127 08:04:03.077945 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-n6mzk" Jan 27 08:04:03 crc kubenswrapper[4787]: I0127 08:04:03.322975 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-54757c584b-b8zkq" event={"ID":"86b956d1-8553-4624-8324-f0a65d627e10","Type":"ContainerStarted","Data":"ac5bbf0cf925cf4225cad32343c9aaeab211401a3f619ffec8542d57173393f2"} Jan 27 08:04:03 crc kubenswrapper[4787]: I0127 08:04:03.324009 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-webhook-8474b5b9d8-n6mzk"] Jan 27 08:04:03 crc kubenswrapper[4787]: I0127 08:04:03.327806 4787 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-6956947bc8-fxxct" Jan 27 08:04:03 crc kubenswrapper[4787]: W0127 08:04:03.332882 4787 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod29bc137f_2121_451f_9684_2b4209b38c32.slice/crio-af4ccb8cff216e28c419c10ba7aac19872eb26750ecec0d89fdf7281b8269fc8 WatchSource:0}: Error finding container af4ccb8cff216e28c419c10ba7aac19872eb26750ecec0d89fdf7281b8269fc8: Status 404 returned error can't find the container with id af4ccb8cff216e28c419c10ba7aac19872eb26750ecec0d89fdf7281b8269fc8 Jan 27 08:04:03 crc kubenswrapper[4787]: I0127 08:04:03.378607 4787 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-console/console-f9d7485db-qptnb"] Jan 27 08:04:04 crc kubenswrapper[4787]: I0127 08:04:04.075637 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-7qxzb" Jan 27 08:04:04 crc kubenswrapper[4787]: I0127 08:04:04.077520 4787 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-7qxzb" Jan 27 08:04:04 crc kubenswrapper[4787]: I0127 08:04:04.293977 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-console-plugin-7754f76f8b-7qxzb"] Jan 27 08:04:04 crc kubenswrapper[4787]: W0127 08:04:04.304913 4787 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod51c3dc19_29a5_45ab_a384_fb7be3aa3f53.slice/crio-1d40f2760ae6c959164d8ab633b3df8ae28ef9fefe607d011d02802a8cb5efea WatchSource:0}: Error finding container 1d40f2760ae6c959164d8ab633b3df8ae28ef9fefe607d011d02802a8cb5efea: Status 404 returned error can't find the container with id 1d40f2760ae6c959164d8ab633b3df8ae28ef9fefe607d011d02802a8cb5efea Jan 27 08:04:04 crc kubenswrapper[4787]: I0127 08:04:04.331499 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-7qxzb" event={"ID":"51c3dc19-29a5-45ab-a384-fb7be3aa3f53","Type":"ContainerStarted","Data":"1d40f2760ae6c959164d8ab633b3df8ae28ef9fefe607d011d02802a8cb5efea"} Jan 27 08:04:04 crc kubenswrapper[4787]: I0127 08:04:04.332885 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-n6mzk" event={"ID":"29bc137f-2121-451f-9684-2b4209b38c32","Type":"ContainerStarted","Data":"af4ccb8cff216e28c419c10ba7aac19872eb26750ecec0d89fdf7281b8269fc8"} Jan 27 08:04:04 crc kubenswrapper[4787]: I0127 08:04:04.334922 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-54757c584b-b8zkq" event={"ID":"86b956d1-8553-4624-8324-f0a65d627e10","Type":"ContainerStarted","Data":"dbb9464c9640fd090519fda3a92c9a6f10a5d43f0e361834dd925bccc4f33927"} Jan 27 08:04:05 crc kubenswrapper[4787]: I0127 08:04:05.342908 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-n6mzk" event={"ID":"29bc137f-2121-451f-9684-2b4209b38c32","Type":"ContainerStarted","Data":"164c7cd54f21400960f6a02044f0385e9d033764f215d8f184c4692c13f29da2"} Jan 27 08:04:05 crc kubenswrapper[4787]: I0127 08:04:05.371117 4787 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-n6mzk" podStartSLOduration=33.170817778 podStartE2EDuration="34.371076799s" podCreationTimestamp="2026-01-27 08:03:31 +0000 UTC" firstStartedPulling="2026-01-27 08:04:03.338586425 +0000 UTC m=+748.990941917" lastFinishedPulling="2026-01-27 08:04:04.538845446 +0000 UTC m=+750.191200938" observedRunningTime="2026-01-27 08:04:05.358134288 +0000 UTC m=+751.010489780" watchObservedRunningTime="2026-01-27 08:04:05.371076799 +0000 UTC m=+751.023432301" Jan 27 08:04:06 crc kubenswrapper[4787]: I0127 08:04:06.354475 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-54757c584b-b8zkq" event={"ID":"86b956d1-8553-4624-8324-f0a65d627e10","Type":"ContainerStarted","Data":"c46607a7d58e3bb0d3af713979259ea1e17fb369bdb0f75fdec56f5a3ff60ba7"} Jan 27 08:04:06 crc kubenswrapper[4787]: I0127 08:04:06.354958 4787 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-n6mzk" Jan 27 08:04:06 crc kubenswrapper[4787]: I0127 08:04:06.382340 4787 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-metrics-54757c584b-b8zkq" podStartSLOduration=32.155857214 podStartE2EDuration="35.382314637s" 
podCreationTimestamp="2026-01-27 08:03:31 +0000 UTC" firstStartedPulling="2026-01-27 08:04:02.339542518 +0000 UTC m=+747.991898010" lastFinishedPulling="2026-01-27 08:04:05.565999921 +0000 UTC m=+751.218355433" observedRunningTime="2026-01-27 08:04:06.380870761 +0000 UTC m=+752.033226263" watchObservedRunningTime="2026-01-27 08:04:06.382314637 +0000 UTC m=+752.034670129" Jan 27 08:04:07 crc kubenswrapper[4787]: I0127 08:04:07.370804 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-7qxzb" event={"ID":"51c3dc19-29a5-45ab-a384-fb7be3aa3f53","Type":"ContainerStarted","Data":"eda7cfc2e8ce29978f4ac8c6407bb454ba0d0d9022c89054043141ec4a20fe19"} Jan 27 08:04:07 crc kubenswrapper[4787]: I0127 08:04:07.395791 4787 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-7qxzb" podStartSLOduration=33.170415409 podStartE2EDuration="35.395767203s" podCreationTimestamp="2026-01-27 08:03:32 +0000 UTC" firstStartedPulling="2026-01-27 08:04:04.307984956 +0000 UTC m=+749.960340448" lastFinishedPulling="2026-01-27 08:04:06.53333676 +0000 UTC m=+752.185692242" observedRunningTime="2026-01-27 08:04:07.391884044 +0000 UTC m=+753.044239556" watchObservedRunningTime="2026-01-27 08:04:07.395767203 +0000 UTC m=+753.048122695" Jan 27 08:04:10 crc kubenswrapper[4787]: I0127 08:04:10.822479 4787 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Jan 27 08:04:12 crc kubenswrapper[4787]: I0127 08:04:12.460636 4787 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-5pnjd"] Jan 27 08:04:12 crc kubenswrapper[4787]: I0127 08:04:12.462073 4787 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-5pnjd" Jan 27 08:04:12 crc kubenswrapper[4787]: I0127 08:04:12.476943 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-5pnjd"] Jan 27 08:04:12 crc kubenswrapper[4787]: I0127 08:04:12.560865 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2969bccf-3d32-4a37-9291-988f4b2b74cb-utilities\") pod \"redhat-operators-5pnjd\" (UID: \"2969bccf-3d32-4a37-9291-988f4b2b74cb\") " pod="openshift-marketplace/redhat-operators-5pnjd" Jan 27 08:04:12 crc kubenswrapper[4787]: I0127 08:04:12.560915 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4mv8p\" (UniqueName: \"kubernetes.io/projected/2969bccf-3d32-4a37-9291-988f4b2b74cb-kube-api-access-4mv8p\") pod \"redhat-operators-5pnjd\" (UID: \"2969bccf-3d32-4a37-9291-988f4b2b74cb\") " pod="openshift-marketplace/redhat-operators-5pnjd" Jan 27 08:04:12 crc kubenswrapper[4787]: I0127 08:04:12.560974 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2969bccf-3d32-4a37-9291-988f4b2b74cb-catalog-content\") pod \"redhat-operators-5pnjd\" (UID: \"2969bccf-3d32-4a37-9291-988f4b2b74cb\") " pod="openshift-marketplace/redhat-operators-5pnjd" Jan 27 08:04:12 crc kubenswrapper[4787]: I0127 08:04:12.662070 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2969bccf-3d32-4a37-9291-988f4b2b74cb-utilities\") pod \"redhat-operators-5pnjd\" (UID: \"2969bccf-3d32-4a37-9291-988f4b2b74cb\") " pod="openshift-marketplace/redhat-operators-5pnjd" Jan 27 08:04:12 crc kubenswrapper[4787]: I0127 08:04:12.662151 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4mv8p\" (UniqueName: \"kubernetes.io/projected/2969bccf-3d32-4a37-9291-988f4b2b74cb-kube-api-access-4mv8p\") pod \"redhat-operators-5pnjd\" (UID: \"2969bccf-3d32-4a37-9291-988f4b2b74cb\") " pod="openshift-marketplace/redhat-operators-5pnjd" Jan 27 08:04:12 crc kubenswrapper[4787]: I0127 08:04:12.662173 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2969bccf-3d32-4a37-9291-988f4b2b74cb-catalog-content\") pod \"redhat-operators-5pnjd\" (UID: \"2969bccf-3d32-4a37-9291-988f4b2b74cb\") " pod="openshift-marketplace/redhat-operators-5pnjd" Jan 27 08:04:12 crc kubenswrapper[4787]: I0127 08:04:12.662622 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2969bccf-3d32-4a37-9291-988f4b2b74cb-utilities\") pod \"redhat-operators-5pnjd\" (UID: \"2969bccf-3d32-4a37-9291-988f4b2b74cb\") " pod="openshift-marketplace/redhat-operators-5pnjd" Jan 27 08:04:12 crc kubenswrapper[4787]: I0127 08:04:12.662691 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2969bccf-3d32-4a37-9291-988f4b2b74cb-catalog-content\") pod \"redhat-operators-5pnjd\" (UID: \"2969bccf-3d32-4a37-9291-988f4b2b74cb\") " pod="openshift-marketplace/redhat-operators-5pnjd" Jan 27 08:04:12 crc kubenswrapper[4787]: I0127 08:04:12.695991 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-4mv8p\" (UniqueName: \"kubernetes.io/projected/2969bccf-3d32-4a37-9291-988f4b2b74cb-kube-api-access-4mv8p\") pod \"redhat-operators-5pnjd\" (UID: \"2969bccf-3d32-4a37-9291-988f4b2b74cb\") " pod="openshift-marketplace/redhat-operators-5pnjd" Jan 27 08:04:12 crc kubenswrapper[4787]: I0127 08:04:12.794342 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-5pnjd" Jan 27 08:04:13 crc kubenswrapper[4787]: I0127 08:04:13.033035 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-5pnjd"] Jan 27 08:04:13 crc kubenswrapper[4787]: I0127 08:04:13.422837 4787 generic.go:334] "Generic (PLEG): container finished" podID="2969bccf-3d32-4a37-9291-988f4b2b74cb" containerID="4d39e1ede4dbd1fbd2d05e09315c30fc85d98547d0055669a526537497a6dfc6" exitCode=0 Jan 27 08:04:13 crc kubenswrapper[4787]: I0127 08:04:13.422907 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5pnjd" event={"ID":"2969bccf-3d32-4a37-9291-988f4b2b74cb","Type":"ContainerDied","Data":"4d39e1ede4dbd1fbd2d05e09315c30fc85d98547d0055669a526537497a6dfc6"} Jan 27 08:04:13 crc kubenswrapper[4787]: I0127 08:04:13.422976 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5pnjd" event={"ID":"2969bccf-3d32-4a37-9291-988f4b2b74cb","Type":"ContainerStarted","Data":"c50821b4746329a2ae295587c3d77d0a2c81cc6611e1f0c92d6a210e102ae24a"} Jan 27 08:04:14 crc kubenswrapper[4787]: I0127 08:04:14.456709 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5pnjd" event={"ID":"2969bccf-3d32-4a37-9291-988f4b2b74cb","Type":"ContainerStarted","Data":"149f3b4ba2c4008c1dca690720d1a958e7656cb788706e58489a818c5c4e6df1"} Jan 27 08:04:14 crc kubenswrapper[4787]: I0127 08:04:14.990051 4787 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-6m9kb"] Jan 27 08:04:14 crc kubenswrapper[4787]: I0127 08:04:14.992056 4787 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-6m9kb" Jan 27 08:04:15 crc kubenswrapper[4787]: I0127 08:04:15.001969 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/88af0f5f-fa97-42c1-b7d6-311625465692-utilities\") pod \"redhat-marketplace-6m9kb\" (UID: \"88af0f5f-fa97-42c1-b7d6-311625465692\") " pod="openshift-marketplace/redhat-marketplace-6m9kb" Jan 27 08:04:15 crc kubenswrapper[4787]: I0127 08:04:15.002056 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4dbf2\" (UniqueName: \"kubernetes.io/projected/88af0f5f-fa97-42c1-b7d6-311625465692-kube-api-access-4dbf2\") pod \"redhat-marketplace-6m9kb\" (UID: \"88af0f5f-fa97-42c1-b7d6-311625465692\") " pod="openshift-marketplace/redhat-marketplace-6m9kb" Jan 27 08:04:15 crc kubenswrapper[4787]: I0127 08:04:15.002117 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/88af0f5f-fa97-42c1-b7d6-311625465692-catalog-content\") pod \"redhat-marketplace-6m9kb\" (UID: \"88af0f5f-fa97-42c1-b7d6-311625465692\") " pod="openshift-marketplace/redhat-marketplace-6m9kb" Jan 27 08:04:15 crc kubenswrapper[4787]: I0127 08:04:15.008761 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-6m9kb"] Jan 27 08:04:15 crc kubenswrapper[4787]: I0127 08:04:15.102900 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/88af0f5f-fa97-42c1-b7d6-311625465692-catalog-content\") pod \"redhat-marketplace-6m9kb\" (UID: \"88af0f5f-fa97-42c1-b7d6-311625465692\") " pod="openshift-marketplace/redhat-marketplace-6m9kb" Jan 27 08:04:15 crc kubenswrapper[4787]: I0127 08:04:15.103009 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/88af0f5f-fa97-42c1-b7d6-311625465692-utilities\") pod \"redhat-marketplace-6m9kb\" (UID: \"88af0f5f-fa97-42c1-b7d6-311625465692\") " pod="openshift-marketplace/redhat-marketplace-6m9kb" Jan 27 08:04:15 crc kubenswrapper[4787]: I0127 08:04:15.103067 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4dbf2\" (UniqueName: \"kubernetes.io/projected/88af0f5f-fa97-42c1-b7d6-311625465692-kube-api-access-4dbf2\") pod \"redhat-marketplace-6m9kb\" (UID: \"88af0f5f-fa97-42c1-b7d6-311625465692\") " pod="openshift-marketplace/redhat-marketplace-6m9kb" Jan 27 08:04:15 crc kubenswrapper[4787]: I0127 08:04:15.103472 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/88af0f5f-fa97-42c1-b7d6-311625465692-catalog-content\") pod \"redhat-marketplace-6m9kb\" (UID: \"88af0f5f-fa97-42c1-b7d6-311625465692\") " pod="openshift-marketplace/redhat-marketplace-6m9kb" Jan 27 08:04:15 crc kubenswrapper[4787]: I0127 08:04:15.104574 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/88af0f5f-fa97-42c1-b7d6-311625465692-utilities\") pod \"redhat-marketplace-6m9kb\" (UID: \"88af0f5f-fa97-42c1-b7d6-311625465692\") " pod="openshift-marketplace/redhat-marketplace-6m9kb" Jan 27 08:04:15 crc kubenswrapper[4787]: I0127 08:04:15.131851 4787 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-4dbf2\" (UniqueName: \"kubernetes.io/projected/88af0f5f-fa97-42c1-b7d6-311625465692-kube-api-access-4dbf2\") pod \"redhat-marketplace-6m9kb\" (UID: \"88af0f5f-fa97-42c1-b7d6-311625465692\") " pod="openshift-marketplace/redhat-marketplace-6m9kb" Jan 27 08:04:15 crc kubenswrapper[4787]: I0127 08:04:15.310064 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-6m9kb" Jan 27 08:04:15 crc kubenswrapper[4787]: I0127 08:04:15.485995 4787 generic.go:334] "Generic (PLEG): container finished" podID="2969bccf-3d32-4a37-9291-988f4b2b74cb" containerID="149f3b4ba2c4008c1dca690720d1a958e7656cb788706e58489a818c5c4e6df1" exitCode=0 Jan 27 08:04:15 crc kubenswrapper[4787]: I0127 08:04:15.486200 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5pnjd" event={"ID":"2969bccf-3d32-4a37-9291-988f4b2b74cb","Type":"ContainerDied","Data":"149f3b4ba2c4008c1dca690720d1a958e7656cb788706e58489a818c5c4e6df1"} Jan 27 08:04:15 crc kubenswrapper[4787]: I0127 08:04:15.548296 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-6m9kb"] Jan 27 08:04:15 crc kubenswrapper[4787]: W0127 08:04:15.554257 4787 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod88af0f5f_fa97_42c1_b7d6_311625465692.slice/crio-bf8cbaf9a924d0b96269779abe10b8a7dfa5aa17bb374866581cd219b082df2a WatchSource:0}: Error finding container bf8cbaf9a924d0b96269779abe10b8a7dfa5aa17bb374866581cd219b082df2a: Status 404 returned error can't find the container with id bf8cbaf9a924d0b96269779abe10b8a7dfa5aa17bb374866581cd219b082df2a Jan 27 08:04:16 crc kubenswrapper[4787]: I0127 08:04:16.497846 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5pnjd" event={"ID":"2969bccf-3d32-4a37-9291-988f4b2b74cb","Type":"ContainerStarted","Data":"af8e7f65b7eb782a5886e82e9e39b8338e065742d4623e1b46bc0facd957f010"} Jan 27 08:04:16 crc kubenswrapper[4787]: I0127 08:04:16.499121 4787 generic.go:334] "Generic (PLEG): container finished" podID="88af0f5f-fa97-42c1-b7d6-311625465692" containerID="8a9cab427f031b8a46a4b4b768dd56fa1a47b6601261ca5d3de39774159a4026" exitCode=0 Jan 27 08:04:16 crc kubenswrapper[4787]: I0127 08:04:16.499148 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-6m9kb" event={"ID":"88af0f5f-fa97-42c1-b7d6-311625465692","Type":"ContainerDied","Data":"8a9cab427f031b8a46a4b4b768dd56fa1a47b6601261ca5d3de39774159a4026"} Jan 27 08:04:16 crc kubenswrapper[4787]: I0127 08:04:16.499165 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-6m9kb" event={"ID":"88af0f5f-fa97-42c1-b7d6-311625465692","Type":"ContainerStarted","Data":"bf8cbaf9a924d0b96269779abe10b8a7dfa5aa17bb374866581cd219b082df2a"} Jan 27 08:04:16 crc kubenswrapper[4787]: I0127 08:04:16.520780 4787 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-5pnjd" podStartSLOduration=1.956624106 podStartE2EDuration="4.520754922s" podCreationTimestamp="2026-01-27 08:04:12 +0000 UTC" firstStartedPulling="2026-01-27 08:04:13.424468489 +0000 UTC m=+759.076823981" lastFinishedPulling="2026-01-27 08:04:15.988599315 +0000 UTC m=+761.640954797" observedRunningTime="2026-01-27 08:04:16.51871691 +0000 UTC m=+762.171072502" 
watchObservedRunningTime="2026-01-27 08:04:16.520754922 +0000 UTC m=+762.173110414" Jan 27 08:04:18 crc kubenswrapper[4787]: I0127 08:04:18.515451 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-6m9kb" event={"ID":"88af0f5f-fa97-42c1-b7d6-311625465692","Type":"ContainerStarted","Data":"b6084011bc3d9d9cb92c4dfc67e822a60c1ce1099b82905c842288c4c0c33297"} Jan 27 08:04:19 crc kubenswrapper[4787]: I0127 08:04:19.529514 4787 generic.go:334] "Generic (PLEG): container finished" podID="88af0f5f-fa97-42c1-b7d6-311625465692" containerID="b6084011bc3d9d9cb92c4dfc67e822a60c1ce1099b82905c842288c4c0c33297" exitCode=0 Jan 27 08:04:19 crc kubenswrapper[4787]: I0127 08:04:19.529614 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-6m9kb" event={"ID":"88af0f5f-fa97-42c1-b7d6-311625465692","Type":"ContainerDied","Data":"b6084011bc3d9d9cb92c4dfc67e822a60c1ce1099b82905c842288c4c0c33297"} Jan 27 08:04:21 crc kubenswrapper[4787]: I0127 08:04:21.546621 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-6m9kb" event={"ID":"88af0f5f-fa97-42c1-b7d6-311625465692","Type":"ContainerStarted","Data":"8751cd3e0dd17c3ecaafc8de052509e4464539e2c0ebee461e47fa1d86da4915"} Jan 27 08:04:22 crc kubenswrapper[4787]: I0127 08:04:22.306489 4787 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-n6mzk" Jan 27 08:04:22 crc kubenswrapper[4787]: I0127 08:04:22.572518 4787 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-6m9kb" podStartSLOduration=4.769634836 podStartE2EDuration="8.572491905s" podCreationTimestamp="2026-01-27 08:04:14 +0000 UTC" firstStartedPulling="2026-01-27 08:04:16.503213544 +0000 UTC m=+762.155569036" lastFinishedPulling="2026-01-27 08:04:20.306070583 +0000 UTC m=+765.958426105" observedRunningTime="2026-01-27 08:04:22.571608483 +0000 UTC m=+768.223963975" watchObservedRunningTime="2026-01-27 08:04:22.572491905 +0000 UTC m=+768.224847397" Jan 27 08:04:22 crc kubenswrapper[4787]: I0127 08:04:22.794711 4787 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-5pnjd" Jan 27 08:04:22 crc kubenswrapper[4787]: I0127 08:04:22.795109 4787 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-5pnjd" Jan 27 08:04:22 crc kubenswrapper[4787]: I0127 08:04:22.837744 4787 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-5pnjd" Jan 27 08:04:23 crc kubenswrapper[4787]: I0127 08:04:23.604073 4787 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-5pnjd" Jan 27 08:04:23 crc kubenswrapper[4787]: I0127 08:04:23.979585 4787 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-5pnjd"] Jan 27 08:04:25 crc kubenswrapper[4787]: I0127 08:04:25.310936 4787 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-6m9kb" Jan 27 08:04:25 crc kubenswrapper[4787]: I0127 08:04:25.311013 4787 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-6m9kb" Jan 27 08:04:25 crc kubenswrapper[4787]: I0127 08:04:25.361059 4787 kubelet.go:2542] "SyncLoop (probe)" probe="startup" 
status="started" pod="openshift-marketplace/redhat-marketplace-6m9kb" Jan 27 08:04:25 crc kubenswrapper[4787]: I0127 08:04:25.571410 4787 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-5pnjd" podUID="2969bccf-3d32-4a37-9291-988f4b2b74cb" containerName="registry-server" containerID="cri-o://af8e7f65b7eb782a5886e82e9e39b8338e065742d4623e1b46bc0facd957f010" gracePeriod=2 Jan 27 08:04:25 crc kubenswrapper[4787]: I0127 08:04:25.615508 4787 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-6m9kb" Jan 27 08:04:25 crc kubenswrapper[4787]: I0127 08:04:25.933470 4787 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-5pnjd" Jan 27 08:04:26 crc kubenswrapper[4787]: I0127 08:04:26.002690 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4mv8p\" (UniqueName: \"kubernetes.io/projected/2969bccf-3d32-4a37-9291-988f4b2b74cb-kube-api-access-4mv8p\") pod \"2969bccf-3d32-4a37-9291-988f4b2b74cb\" (UID: \"2969bccf-3d32-4a37-9291-988f4b2b74cb\") " Jan 27 08:04:26 crc kubenswrapper[4787]: I0127 08:04:26.002849 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2969bccf-3d32-4a37-9291-988f4b2b74cb-catalog-content\") pod \"2969bccf-3d32-4a37-9291-988f4b2b74cb\" (UID: \"2969bccf-3d32-4a37-9291-988f4b2b74cb\") " Jan 27 08:04:26 crc kubenswrapper[4787]: I0127 08:04:26.002881 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2969bccf-3d32-4a37-9291-988f4b2b74cb-utilities\") pod \"2969bccf-3d32-4a37-9291-988f4b2b74cb\" (UID: \"2969bccf-3d32-4a37-9291-988f4b2b74cb\") " Jan 27 08:04:26 crc kubenswrapper[4787]: I0127 08:04:26.004572 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2969bccf-3d32-4a37-9291-988f4b2b74cb-utilities" (OuterVolumeSpecName: "utilities") pod "2969bccf-3d32-4a37-9291-988f4b2b74cb" (UID: "2969bccf-3d32-4a37-9291-988f4b2b74cb"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 27 08:04:26 crc kubenswrapper[4787]: I0127 08:04:26.010700 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2969bccf-3d32-4a37-9291-988f4b2b74cb-kube-api-access-4mv8p" (OuterVolumeSpecName: "kube-api-access-4mv8p") pod "2969bccf-3d32-4a37-9291-988f4b2b74cb" (UID: "2969bccf-3d32-4a37-9291-988f4b2b74cb"). InnerVolumeSpecName "kube-api-access-4mv8p". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 27 08:04:26 crc kubenswrapper[4787]: I0127 08:04:26.105219 4787 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4mv8p\" (UniqueName: \"kubernetes.io/projected/2969bccf-3d32-4a37-9291-988f4b2b74cb-kube-api-access-4mv8p\") on node \"crc\" DevicePath \"\"" Jan 27 08:04:26 crc kubenswrapper[4787]: I0127 08:04:26.105260 4787 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2969bccf-3d32-4a37-9291-988f4b2b74cb-utilities\") on node \"crc\" DevicePath \"\"" Jan 27 08:04:26 crc kubenswrapper[4787]: I0127 08:04:26.122123 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2969bccf-3d32-4a37-9291-988f4b2b74cb-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "2969bccf-3d32-4a37-9291-988f4b2b74cb" (UID: "2969bccf-3d32-4a37-9291-988f4b2b74cb"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 27 08:04:26 crc kubenswrapper[4787]: I0127 08:04:26.206723 4787 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2969bccf-3d32-4a37-9291-988f4b2b74cb-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 27 08:04:26 crc kubenswrapper[4787]: I0127 08:04:26.580938 4787 generic.go:334] "Generic (PLEG): container finished" podID="2969bccf-3d32-4a37-9291-988f4b2b74cb" containerID="af8e7f65b7eb782a5886e82e9e39b8338e065742d4623e1b46bc0facd957f010" exitCode=0 Jan 27 08:04:26 crc kubenswrapper[4787]: I0127 08:04:26.581003 4787 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-5pnjd" Jan 27 08:04:26 crc kubenswrapper[4787]: I0127 08:04:26.580996 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5pnjd" event={"ID":"2969bccf-3d32-4a37-9291-988f4b2b74cb","Type":"ContainerDied","Data":"af8e7f65b7eb782a5886e82e9e39b8338e065742d4623e1b46bc0facd957f010"} Jan 27 08:04:26 crc kubenswrapper[4787]: I0127 08:04:26.581429 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5pnjd" event={"ID":"2969bccf-3d32-4a37-9291-988f4b2b74cb","Type":"ContainerDied","Data":"c50821b4746329a2ae295587c3d77d0a2c81cc6611e1f0c92d6a210e102ae24a"} Jan 27 08:04:26 crc kubenswrapper[4787]: I0127 08:04:26.581456 4787 scope.go:117] "RemoveContainer" containerID="af8e7f65b7eb782a5886e82e9e39b8338e065742d4623e1b46bc0facd957f010" Jan 27 08:04:26 crc kubenswrapper[4787]: I0127 08:04:26.599771 4787 scope.go:117] "RemoveContainer" containerID="149f3b4ba2c4008c1dca690720d1a958e7656cb788706e58489a818c5c4e6df1" Jan 27 08:04:26 crc kubenswrapper[4787]: I0127 08:04:26.625037 4787 scope.go:117] "RemoveContainer" containerID="4d39e1ede4dbd1fbd2d05e09315c30fc85d98547d0055669a526537497a6dfc6" Jan 27 08:04:26 crc kubenswrapper[4787]: I0127 08:04:26.626331 4787 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-5pnjd"] Jan 27 08:04:26 crc kubenswrapper[4787]: I0127 08:04:26.630726 4787 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-5pnjd"] Jan 27 08:04:26 crc kubenswrapper[4787]: I0127 08:04:26.658022 4787 scope.go:117] "RemoveContainer" containerID="af8e7f65b7eb782a5886e82e9e39b8338e065742d4623e1b46bc0facd957f010" Jan 27 08:04:26 crc kubenswrapper[4787]: E0127 08:04:26.658795 4787 log.go:32] "ContainerStatus 
from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"af8e7f65b7eb782a5886e82e9e39b8338e065742d4623e1b46bc0facd957f010\": container with ID starting with af8e7f65b7eb782a5886e82e9e39b8338e065742d4623e1b46bc0facd957f010 not found: ID does not exist" containerID="af8e7f65b7eb782a5886e82e9e39b8338e065742d4623e1b46bc0facd957f010" Jan 27 08:04:26 crc kubenswrapper[4787]: I0127 08:04:26.658863 4787 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"af8e7f65b7eb782a5886e82e9e39b8338e065742d4623e1b46bc0facd957f010"} err="failed to get container status \"af8e7f65b7eb782a5886e82e9e39b8338e065742d4623e1b46bc0facd957f010\": rpc error: code = NotFound desc = could not find container \"af8e7f65b7eb782a5886e82e9e39b8338e065742d4623e1b46bc0facd957f010\": container with ID starting with af8e7f65b7eb782a5886e82e9e39b8338e065742d4623e1b46bc0facd957f010 not found: ID does not exist" Jan 27 08:04:26 crc kubenswrapper[4787]: I0127 08:04:26.658901 4787 scope.go:117] "RemoveContainer" containerID="149f3b4ba2c4008c1dca690720d1a958e7656cb788706e58489a818c5c4e6df1" Jan 27 08:04:26 crc kubenswrapper[4787]: E0127 08:04:26.659621 4787 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"149f3b4ba2c4008c1dca690720d1a958e7656cb788706e58489a818c5c4e6df1\": container with ID starting with 149f3b4ba2c4008c1dca690720d1a958e7656cb788706e58489a818c5c4e6df1 not found: ID does not exist" containerID="149f3b4ba2c4008c1dca690720d1a958e7656cb788706e58489a818c5c4e6df1" Jan 27 08:04:26 crc kubenswrapper[4787]: I0127 08:04:26.659690 4787 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"149f3b4ba2c4008c1dca690720d1a958e7656cb788706e58489a818c5c4e6df1"} err="failed to get container status \"149f3b4ba2c4008c1dca690720d1a958e7656cb788706e58489a818c5c4e6df1\": rpc error: code = NotFound desc = could not find container \"149f3b4ba2c4008c1dca690720d1a958e7656cb788706e58489a818c5c4e6df1\": container with ID starting with 149f3b4ba2c4008c1dca690720d1a958e7656cb788706e58489a818c5c4e6df1 not found: ID does not exist" Jan 27 08:04:26 crc kubenswrapper[4787]: I0127 08:04:26.659742 4787 scope.go:117] "RemoveContainer" containerID="4d39e1ede4dbd1fbd2d05e09315c30fc85d98547d0055669a526537497a6dfc6" Jan 27 08:04:26 crc kubenswrapper[4787]: E0127 08:04:26.660173 4787 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4d39e1ede4dbd1fbd2d05e09315c30fc85d98547d0055669a526537497a6dfc6\": container with ID starting with 4d39e1ede4dbd1fbd2d05e09315c30fc85d98547d0055669a526537497a6dfc6 not found: ID does not exist" containerID="4d39e1ede4dbd1fbd2d05e09315c30fc85d98547d0055669a526537497a6dfc6" Jan 27 08:04:26 crc kubenswrapper[4787]: I0127 08:04:26.660211 4787 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4d39e1ede4dbd1fbd2d05e09315c30fc85d98547d0055669a526537497a6dfc6"} err="failed to get container status \"4d39e1ede4dbd1fbd2d05e09315c30fc85d98547d0055669a526537497a6dfc6\": rpc error: code = NotFound desc = could not find container \"4d39e1ede4dbd1fbd2d05e09315c30fc85d98547d0055669a526537497a6dfc6\": container with ID starting with 4d39e1ede4dbd1fbd2d05e09315c30fc85d98547d0055669a526537497a6dfc6 not found: ID does not exist" Jan 27 08:04:27 crc kubenswrapper[4787]: I0127 08:04:27.083507 4787 kubelet_volumes.go:163] "Cleaned up orphaned pod 
volumes dir" podUID="2969bccf-3d32-4a37-9291-988f4b2b74cb" path="/var/lib/kubelet/pods/2969bccf-3d32-4a37-9291-988f4b2b74cb/volumes" Jan 27 08:04:27 crc kubenswrapper[4787]: I0127 08:04:27.777705 4787 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-6m9kb"] Jan 27 08:04:27 crc kubenswrapper[4787]: I0127 08:04:27.778745 4787 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-6m9kb" podUID="88af0f5f-fa97-42c1-b7d6-311625465692" containerName="registry-server" containerID="cri-o://8751cd3e0dd17c3ecaafc8de052509e4464539e2c0ebee461e47fa1d86da4915" gracePeriod=2 Jan 27 08:04:28 crc kubenswrapper[4787]: I0127 08:04:28.122348 4787 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-6m9kb" Jan 27 08:04:28 crc kubenswrapper[4787]: I0127 08:04:28.232324 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/88af0f5f-fa97-42c1-b7d6-311625465692-utilities\") pod \"88af0f5f-fa97-42c1-b7d6-311625465692\" (UID: \"88af0f5f-fa97-42c1-b7d6-311625465692\") " Jan 27 08:04:28 crc kubenswrapper[4787]: I0127 08:04:28.232393 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/88af0f5f-fa97-42c1-b7d6-311625465692-catalog-content\") pod \"88af0f5f-fa97-42c1-b7d6-311625465692\" (UID: \"88af0f5f-fa97-42c1-b7d6-311625465692\") " Jan 27 08:04:28 crc kubenswrapper[4787]: I0127 08:04:28.232498 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4dbf2\" (UniqueName: \"kubernetes.io/projected/88af0f5f-fa97-42c1-b7d6-311625465692-kube-api-access-4dbf2\") pod \"88af0f5f-fa97-42c1-b7d6-311625465692\" (UID: \"88af0f5f-fa97-42c1-b7d6-311625465692\") " Jan 27 08:04:28 crc kubenswrapper[4787]: I0127 08:04:28.234082 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/88af0f5f-fa97-42c1-b7d6-311625465692-utilities" (OuterVolumeSpecName: "utilities") pod "88af0f5f-fa97-42c1-b7d6-311625465692" (UID: "88af0f5f-fa97-42c1-b7d6-311625465692"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 27 08:04:28 crc kubenswrapper[4787]: I0127 08:04:28.239747 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/88af0f5f-fa97-42c1-b7d6-311625465692-kube-api-access-4dbf2" (OuterVolumeSpecName: "kube-api-access-4dbf2") pod "88af0f5f-fa97-42c1-b7d6-311625465692" (UID: "88af0f5f-fa97-42c1-b7d6-311625465692"). InnerVolumeSpecName "kube-api-access-4dbf2". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 27 08:04:28 crc kubenswrapper[4787]: I0127 08:04:28.268992 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/88af0f5f-fa97-42c1-b7d6-311625465692-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "88af0f5f-fa97-42c1-b7d6-311625465692" (UID: "88af0f5f-fa97-42c1-b7d6-311625465692"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 27 08:04:28 crc kubenswrapper[4787]: I0127 08:04:28.334471 4787 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4dbf2\" (UniqueName: \"kubernetes.io/projected/88af0f5f-fa97-42c1-b7d6-311625465692-kube-api-access-4dbf2\") on node \"crc\" DevicePath \"\"" Jan 27 08:04:28 crc kubenswrapper[4787]: I0127 08:04:28.334513 4787 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/88af0f5f-fa97-42c1-b7d6-311625465692-utilities\") on node \"crc\" DevicePath \"\"" Jan 27 08:04:28 crc kubenswrapper[4787]: I0127 08:04:28.334526 4787 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/88af0f5f-fa97-42c1-b7d6-311625465692-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 27 08:04:28 crc kubenswrapper[4787]: I0127 08:04:28.426210 4787 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-console/console-f9d7485db-qptnb" podUID="b070600e-8a6f-4bb9-a1c2-e763f55d90eb" containerName="console" containerID="cri-o://310ccc96bfc18eaae7b0abfc190b26a49989ec96f4c419c7eab3252a1004a577" gracePeriod=15 Jan 27 08:04:28 crc kubenswrapper[4787]: I0127 08:04:28.606416 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-f9d7485db-qptnb_b070600e-8a6f-4bb9-a1c2-e763f55d90eb/console/0.log" Jan 27 08:04:28 crc kubenswrapper[4787]: I0127 08:04:28.606482 4787 generic.go:334] "Generic (PLEG): container finished" podID="b070600e-8a6f-4bb9-a1c2-e763f55d90eb" containerID="310ccc96bfc18eaae7b0abfc190b26a49989ec96f4c419c7eab3252a1004a577" exitCode=2 Jan 27 08:04:28 crc kubenswrapper[4787]: I0127 08:04:28.606574 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-qptnb" event={"ID":"b070600e-8a6f-4bb9-a1c2-e763f55d90eb","Type":"ContainerDied","Data":"310ccc96bfc18eaae7b0abfc190b26a49989ec96f4c419c7eab3252a1004a577"} Jan 27 08:04:28 crc kubenswrapper[4787]: I0127 08:04:28.611044 4787 generic.go:334] "Generic (PLEG): container finished" podID="88af0f5f-fa97-42c1-b7d6-311625465692" containerID="8751cd3e0dd17c3ecaafc8de052509e4464539e2c0ebee461e47fa1d86da4915" exitCode=0 Jan 27 08:04:28 crc kubenswrapper[4787]: I0127 08:04:28.611100 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-6m9kb" event={"ID":"88af0f5f-fa97-42c1-b7d6-311625465692","Type":"ContainerDied","Data":"8751cd3e0dd17c3ecaafc8de052509e4464539e2c0ebee461e47fa1d86da4915"} Jan 27 08:04:28 crc kubenswrapper[4787]: I0127 08:04:28.611134 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-6m9kb" event={"ID":"88af0f5f-fa97-42c1-b7d6-311625465692","Type":"ContainerDied","Data":"bf8cbaf9a924d0b96269779abe10b8a7dfa5aa17bb374866581cd219b082df2a"} Jan 27 08:04:28 crc kubenswrapper[4787]: I0127 08:04:28.611150 4787 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-6m9kb" Jan 27 08:04:28 crc kubenswrapper[4787]: I0127 08:04:28.611156 4787 scope.go:117] "RemoveContainer" containerID="8751cd3e0dd17c3ecaafc8de052509e4464539e2c0ebee461e47fa1d86da4915" Jan 27 08:04:28 crc kubenswrapper[4787]: I0127 08:04:28.641190 4787 scope.go:117] "RemoveContainer" containerID="b6084011bc3d9d9cb92c4dfc67e822a60c1ce1099b82905c842288c4c0c33297" Jan 27 08:04:28 crc kubenswrapper[4787]: I0127 08:04:28.651682 4787 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-6m9kb"] Jan 27 08:04:28 crc kubenswrapper[4787]: I0127 08:04:28.658204 4787 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-6m9kb"] Jan 27 08:04:28 crc kubenswrapper[4787]: I0127 08:04:28.672458 4787 scope.go:117] "RemoveContainer" containerID="8a9cab427f031b8a46a4b4b768dd56fa1a47b6601261ca5d3de39774159a4026" Jan 27 08:04:28 crc kubenswrapper[4787]: I0127 08:04:28.705967 4787 scope.go:117] "RemoveContainer" containerID="8751cd3e0dd17c3ecaafc8de052509e4464539e2c0ebee461e47fa1d86da4915" Jan 27 08:04:28 crc kubenswrapper[4787]: E0127 08:04:28.706813 4787 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8751cd3e0dd17c3ecaafc8de052509e4464539e2c0ebee461e47fa1d86da4915\": container with ID starting with 8751cd3e0dd17c3ecaafc8de052509e4464539e2c0ebee461e47fa1d86da4915 not found: ID does not exist" containerID="8751cd3e0dd17c3ecaafc8de052509e4464539e2c0ebee461e47fa1d86da4915" Jan 27 08:04:28 crc kubenswrapper[4787]: I0127 08:04:28.706846 4787 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8751cd3e0dd17c3ecaafc8de052509e4464539e2c0ebee461e47fa1d86da4915"} err="failed to get container status \"8751cd3e0dd17c3ecaafc8de052509e4464539e2c0ebee461e47fa1d86da4915\": rpc error: code = NotFound desc = could not find container \"8751cd3e0dd17c3ecaafc8de052509e4464539e2c0ebee461e47fa1d86da4915\": container with ID starting with 8751cd3e0dd17c3ecaafc8de052509e4464539e2c0ebee461e47fa1d86da4915 not found: ID does not exist" Jan 27 08:04:28 crc kubenswrapper[4787]: I0127 08:04:28.706891 4787 scope.go:117] "RemoveContainer" containerID="b6084011bc3d9d9cb92c4dfc67e822a60c1ce1099b82905c842288c4c0c33297" Jan 27 08:04:28 crc kubenswrapper[4787]: E0127 08:04:28.707370 4787 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b6084011bc3d9d9cb92c4dfc67e822a60c1ce1099b82905c842288c4c0c33297\": container with ID starting with b6084011bc3d9d9cb92c4dfc67e822a60c1ce1099b82905c842288c4c0c33297 not found: ID does not exist" containerID="b6084011bc3d9d9cb92c4dfc67e822a60c1ce1099b82905c842288c4c0c33297" Jan 27 08:04:28 crc kubenswrapper[4787]: I0127 08:04:28.707386 4787 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b6084011bc3d9d9cb92c4dfc67e822a60c1ce1099b82905c842288c4c0c33297"} err="failed to get container status \"b6084011bc3d9d9cb92c4dfc67e822a60c1ce1099b82905c842288c4c0c33297\": rpc error: code = NotFound desc = could not find container \"b6084011bc3d9d9cb92c4dfc67e822a60c1ce1099b82905c842288c4c0c33297\": container with ID starting with b6084011bc3d9d9cb92c4dfc67e822a60c1ce1099b82905c842288c4c0c33297 not found: ID does not exist" Jan 27 08:04:28 crc kubenswrapper[4787]: I0127 08:04:28.707406 4787 scope.go:117] "RemoveContainer" 
containerID="8a9cab427f031b8a46a4b4b768dd56fa1a47b6601261ca5d3de39774159a4026" Jan 27 08:04:28 crc kubenswrapper[4787]: E0127 08:04:28.707889 4787 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8a9cab427f031b8a46a4b4b768dd56fa1a47b6601261ca5d3de39774159a4026\": container with ID starting with 8a9cab427f031b8a46a4b4b768dd56fa1a47b6601261ca5d3de39774159a4026 not found: ID does not exist" containerID="8a9cab427f031b8a46a4b4b768dd56fa1a47b6601261ca5d3de39774159a4026" Jan 27 08:04:28 crc kubenswrapper[4787]: I0127 08:04:28.707948 4787 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8a9cab427f031b8a46a4b4b768dd56fa1a47b6601261ca5d3de39774159a4026"} err="failed to get container status \"8a9cab427f031b8a46a4b4b768dd56fa1a47b6601261ca5d3de39774159a4026\": rpc error: code = NotFound desc = could not find container \"8a9cab427f031b8a46a4b4b768dd56fa1a47b6601261ca5d3de39774159a4026\": container with ID starting with 8a9cab427f031b8a46a4b4b768dd56fa1a47b6601261ca5d3de39774159a4026 not found: ID does not exist" Jan 27 08:04:28 crc kubenswrapper[4787]: I0127 08:04:28.795600 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-f9d7485db-qptnb_b070600e-8a6f-4bb9-a1c2-e763f55d90eb/console/0.log" Jan 27 08:04:28 crc kubenswrapper[4787]: I0127 08:04:28.795703 4787 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-qptnb" Jan 27 08:04:28 crc kubenswrapper[4787]: I0127 08:04:28.845891 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/b070600e-8a6f-4bb9-a1c2-e763f55d90eb-oauth-serving-cert\") pod \"b070600e-8a6f-4bb9-a1c2-e763f55d90eb\" (UID: \"b070600e-8a6f-4bb9-a1c2-e763f55d90eb\") " Jan 27 08:04:28 crc kubenswrapper[4787]: I0127 08:04:28.845967 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/b070600e-8a6f-4bb9-a1c2-e763f55d90eb-console-oauth-config\") pod \"b070600e-8a6f-4bb9-a1c2-e763f55d90eb\" (UID: \"b070600e-8a6f-4bb9-a1c2-e763f55d90eb\") " Jan 27 08:04:28 crc kubenswrapper[4787]: I0127 08:04:28.846006 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b070600e-8a6f-4bb9-a1c2-e763f55d90eb-trusted-ca-bundle\") pod \"b070600e-8a6f-4bb9-a1c2-e763f55d90eb\" (UID: \"b070600e-8a6f-4bb9-a1c2-e763f55d90eb\") " Jan 27 08:04:28 crc kubenswrapper[4787]: I0127 08:04:28.846057 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9cbr9\" (UniqueName: \"kubernetes.io/projected/b070600e-8a6f-4bb9-a1c2-e763f55d90eb-kube-api-access-9cbr9\") pod \"b070600e-8a6f-4bb9-a1c2-e763f55d90eb\" (UID: \"b070600e-8a6f-4bb9-a1c2-e763f55d90eb\") " Jan 27 08:04:28 crc kubenswrapper[4787]: I0127 08:04:28.847296 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b070600e-8a6f-4bb9-a1c2-e763f55d90eb-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "b070600e-8a6f-4bb9-a1c2-e763f55d90eb" (UID: "b070600e-8a6f-4bb9-a1c2-e763f55d90eb"). InnerVolumeSpecName "oauth-serving-cert". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 27 08:04:28 crc kubenswrapper[4787]: I0127 08:04:28.847498 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b070600e-8a6f-4bb9-a1c2-e763f55d90eb-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "b070600e-8a6f-4bb9-a1c2-e763f55d90eb" (UID: "b070600e-8a6f-4bb9-a1c2-e763f55d90eb"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 27 08:04:28 crc kubenswrapper[4787]: I0127 08:04:28.847586 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/b070600e-8a6f-4bb9-a1c2-e763f55d90eb-console-config\") pod \"b070600e-8a6f-4bb9-a1c2-e763f55d90eb\" (UID: \"b070600e-8a6f-4bb9-a1c2-e763f55d90eb\") " Jan 27 08:04:28 crc kubenswrapper[4787]: I0127 08:04:28.847678 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/b070600e-8a6f-4bb9-a1c2-e763f55d90eb-console-serving-cert\") pod \"b070600e-8a6f-4bb9-a1c2-e763f55d90eb\" (UID: \"b070600e-8a6f-4bb9-a1c2-e763f55d90eb\") " Jan 27 08:04:28 crc kubenswrapper[4787]: I0127 08:04:28.848141 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b070600e-8a6f-4bb9-a1c2-e763f55d90eb-console-config" (OuterVolumeSpecName: "console-config") pod "b070600e-8a6f-4bb9-a1c2-e763f55d90eb" (UID: "b070600e-8a6f-4bb9-a1c2-e763f55d90eb"). InnerVolumeSpecName "console-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 27 08:04:28 crc kubenswrapper[4787]: I0127 08:04:28.848176 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/b070600e-8a6f-4bb9-a1c2-e763f55d90eb-service-ca\") pod \"b070600e-8a6f-4bb9-a1c2-e763f55d90eb\" (UID: \"b070600e-8a6f-4bb9-a1c2-e763f55d90eb\") " Jan 27 08:04:28 crc kubenswrapper[4787]: I0127 08:04:28.848480 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b070600e-8a6f-4bb9-a1c2-e763f55d90eb-service-ca" (OuterVolumeSpecName: "service-ca") pod "b070600e-8a6f-4bb9-a1c2-e763f55d90eb" (UID: "b070600e-8a6f-4bb9-a1c2-e763f55d90eb"). InnerVolumeSpecName "service-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 27 08:04:28 crc kubenswrapper[4787]: I0127 08:04:28.848638 4787 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/b070600e-8a6f-4bb9-a1c2-e763f55d90eb-console-config\") on node \"crc\" DevicePath \"\"" Jan 27 08:04:28 crc kubenswrapper[4787]: I0127 08:04:28.848659 4787 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/b070600e-8a6f-4bb9-a1c2-e763f55d90eb-service-ca\") on node \"crc\" DevicePath \"\"" Jan 27 08:04:28 crc kubenswrapper[4787]: I0127 08:04:28.848669 4787 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/b070600e-8a6f-4bb9-a1c2-e763f55d90eb-oauth-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 27 08:04:28 crc kubenswrapper[4787]: I0127 08:04:28.848681 4787 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b070600e-8a6f-4bb9-a1c2-e763f55d90eb-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 27 08:04:28 crc kubenswrapper[4787]: I0127 08:04:28.853190 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b070600e-8a6f-4bb9-a1c2-e763f55d90eb-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "b070600e-8a6f-4bb9-a1c2-e763f55d90eb" (UID: "b070600e-8a6f-4bb9-a1c2-e763f55d90eb"). InnerVolumeSpecName "console-oauth-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 27 08:04:28 crc kubenswrapper[4787]: I0127 08:04:28.853543 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b070600e-8a6f-4bb9-a1c2-e763f55d90eb-kube-api-access-9cbr9" (OuterVolumeSpecName: "kube-api-access-9cbr9") pod "b070600e-8a6f-4bb9-a1c2-e763f55d90eb" (UID: "b070600e-8a6f-4bb9-a1c2-e763f55d90eb"). InnerVolumeSpecName "kube-api-access-9cbr9". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 27 08:04:28 crc kubenswrapper[4787]: I0127 08:04:28.854051 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b070600e-8a6f-4bb9-a1c2-e763f55d90eb-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "b070600e-8a6f-4bb9-a1c2-e763f55d90eb" (UID: "b070600e-8a6f-4bb9-a1c2-e763f55d90eb"). InnerVolumeSpecName "console-serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 27 08:04:28 crc kubenswrapper[4787]: I0127 08:04:28.950074 4787 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/b070600e-8a6f-4bb9-a1c2-e763f55d90eb-console-oauth-config\") on node \"crc\" DevicePath \"\"" Jan 27 08:04:28 crc kubenswrapper[4787]: I0127 08:04:28.950456 4787 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9cbr9\" (UniqueName: \"kubernetes.io/projected/b070600e-8a6f-4bb9-a1c2-e763f55d90eb-kube-api-access-9cbr9\") on node \"crc\" DevicePath \"\"" Jan 27 08:04:28 crc kubenswrapper[4787]: I0127 08:04:28.950472 4787 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/b070600e-8a6f-4bb9-a1c2-e763f55d90eb-console-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 27 08:04:29 crc kubenswrapper[4787]: I0127 08:04:29.089268 4787 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="88af0f5f-fa97-42c1-b7d6-311625465692" path="/var/lib/kubelet/pods/88af0f5f-fa97-42c1-b7d6-311625465692/volumes" Jan 27 08:04:29 crc kubenswrapper[4787]: I0127 08:04:29.624428 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-f9d7485db-qptnb_b070600e-8a6f-4bb9-a1c2-e763f55d90eb/console/0.log" Jan 27 08:04:29 crc kubenswrapper[4787]: I0127 08:04:29.625117 4787 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-qptnb" Jan 27 08:04:29 crc kubenswrapper[4787]: I0127 08:04:29.625814 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-qptnb" event={"ID":"b070600e-8a6f-4bb9-a1c2-e763f55d90eb","Type":"ContainerDied","Data":"29d34fb56a5ea9d4aa885d1db94f1f656d15f31f013518446e7a295dc5daddbd"} Jan 27 08:04:29 crc kubenswrapper[4787]: I0127 08:04:29.625893 4787 scope.go:117] "RemoveContainer" containerID="310ccc96bfc18eaae7b0abfc190b26a49989ec96f4c419c7eab3252a1004a577" Jan 27 08:04:29 crc kubenswrapper[4787]: I0127 08:04:29.655225 4787 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-console/console-f9d7485db-qptnb"] Jan 27 08:04:29 crc kubenswrapper[4787]: I0127 08:04:29.660467 4787 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-console/console-f9d7485db-qptnb"] Jan 27 08:04:31 crc kubenswrapper[4787]: I0127 08:04:31.085602 4787 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b070600e-8a6f-4bb9-a1c2-e763f55d90eb" path="/var/lib/kubelet/pods/b070600e-8a6f-4bb9-a1c2-e763f55d90eb/volumes" Jan 27 08:04:35 crc kubenswrapper[4787]: I0127 08:04:35.232625 4787 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc44q77"] Jan 27 08:04:35 crc kubenswrapper[4787]: E0127 08:04:35.233543 4787 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2969bccf-3d32-4a37-9291-988f4b2b74cb" containerName="extract-content" Jan 27 08:04:35 crc kubenswrapper[4787]: I0127 08:04:35.233668 4787 state_mem.go:107] "Deleted CPUSet assignment" podUID="2969bccf-3d32-4a37-9291-988f4b2b74cb" containerName="extract-content" Jan 27 08:04:35 crc kubenswrapper[4787]: E0127 08:04:35.233692 4787 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="88af0f5f-fa97-42c1-b7d6-311625465692" containerName="extract-content" Jan 27 08:04:35 crc kubenswrapper[4787]: I0127 08:04:35.233701 4787 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="88af0f5f-fa97-42c1-b7d6-311625465692" containerName="extract-content" Jan 27 08:04:35 crc kubenswrapper[4787]: E0127 08:04:35.233725 4787 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="88af0f5f-fa97-42c1-b7d6-311625465692" containerName="registry-server" Jan 27 08:04:35 crc kubenswrapper[4787]: I0127 08:04:35.233734 4787 state_mem.go:107] "Deleted CPUSet assignment" podUID="88af0f5f-fa97-42c1-b7d6-311625465692" containerName="registry-server" Jan 27 08:04:35 crc kubenswrapper[4787]: E0127 08:04:35.233747 4787 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2969bccf-3d32-4a37-9291-988f4b2b74cb" containerName="extract-utilities" Jan 27 08:04:35 crc kubenswrapper[4787]: I0127 08:04:35.233756 4787 state_mem.go:107] "Deleted CPUSet assignment" podUID="2969bccf-3d32-4a37-9291-988f4b2b74cb" containerName="extract-utilities" Jan 27 08:04:35 crc kubenswrapper[4787]: E0127 08:04:35.233769 4787 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="88af0f5f-fa97-42c1-b7d6-311625465692" containerName="extract-utilities" Jan 27 08:04:35 crc kubenswrapper[4787]: I0127 08:04:35.233776 4787 state_mem.go:107] "Deleted CPUSet assignment" podUID="88af0f5f-fa97-42c1-b7d6-311625465692" containerName="extract-utilities" Jan 27 08:04:35 crc kubenswrapper[4787]: E0127 08:04:35.233792 4787 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b070600e-8a6f-4bb9-a1c2-e763f55d90eb" containerName="console" Jan 27 08:04:35 crc kubenswrapper[4787]: I0127 08:04:35.233799 4787 state_mem.go:107] "Deleted CPUSet assignment" podUID="b070600e-8a6f-4bb9-a1c2-e763f55d90eb" containerName="console" Jan 27 08:04:35 crc kubenswrapper[4787]: E0127 08:04:35.233812 4787 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2969bccf-3d32-4a37-9291-988f4b2b74cb" containerName="registry-server" Jan 27 08:04:35 crc kubenswrapper[4787]: I0127 08:04:35.233819 4787 state_mem.go:107] "Deleted CPUSet assignment" podUID="2969bccf-3d32-4a37-9291-988f4b2b74cb" containerName="registry-server" Jan 27 08:04:35 crc kubenswrapper[4787]: I0127 08:04:35.233960 4787 memory_manager.go:354] "RemoveStaleState removing state" podUID="88af0f5f-fa97-42c1-b7d6-311625465692" containerName="registry-server" Jan 27 08:04:35 crc kubenswrapper[4787]: I0127 08:04:35.233979 4787 memory_manager.go:354] "RemoveStaleState removing state" podUID="b070600e-8a6f-4bb9-a1c2-e763f55d90eb" containerName="console" Jan 27 08:04:35 crc kubenswrapper[4787]: I0127 08:04:35.233989 4787 memory_manager.go:354] "RemoveStaleState removing state" podUID="2969bccf-3d32-4a37-9291-988f4b2b74cb" containerName="registry-server" Jan 27 08:04:35 crc kubenswrapper[4787]: I0127 08:04:35.234988 4787 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc44q77" Jan 27 08:04:35 crc kubenswrapper[4787]: I0127 08:04:35.239163 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Jan 27 08:04:35 crc kubenswrapper[4787]: I0127 08:04:35.243819 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc44q77"] Jan 27 08:04:35 crc kubenswrapper[4787]: I0127 08:04:35.366527 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/db32e829-ff6c-4e31-bbc4-5291eb8127d3-bundle\") pod \"270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc44q77\" (UID: \"db32e829-ff6c-4e31-bbc4-5291eb8127d3\") " pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc44q77" Jan 27 08:04:35 crc kubenswrapper[4787]: I0127 08:04:35.366614 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/db32e829-ff6c-4e31-bbc4-5291eb8127d3-util\") pod \"270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc44q77\" (UID: \"db32e829-ff6c-4e31-bbc4-5291eb8127d3\") " pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc44q77" Jan 27 08:04:35 crc kubenswrapper[4787]: I0127 08:04:35.366660 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2c7j7\" (UniqueName: \"kubernetes.io/projected/db32e829-ff6c-4e31-bbc4-5291eb8127d3-kube-api-access-2c7j7\") pod \"270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc44q77\" (UID: \"db32e829-ff6c-4e31-bbc4-5291eb8127d3\") " pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc44q77" Jan 27 08:04:35 crc kubenswrapper[4787]: I0127 08:04:35.467597 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/db32e829-ff6c-4e31-bbc4-5291eb8127d3-util\") pod \"270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc44q77\" (UID: \"db32e829-ff6c-4e31-bbc4-5291eb8127d3\") " pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc44q77" Jan 27 08:04:35 crc kubenswrapper[4787]: I0127 08:04:35.467708 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2c7j7\" (UniqueName: \"kubernetes.io/projected/db32e829-ff6c-4e31-bbc4-5291eb8127d3-kube-api-access-2c7j7\") pod \"270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc44q77\" (UID: \"db32e829-ff6c-4e31-bbc4-5291eb8127d3\") " pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc44q77" Jan 27 08:04:35 crc kubenswrapper[4787]: I0127 08:04:35.467774 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/db32e829-ff6c-4e31-bbc4-5291eb8127d3-bundle\") pod \"270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc44q77\" (UID: \"db32e829-ff6c-4e31-bbc4-5291eb8127d3\") " pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc44q77" Jan 27 08:04:35 crc kubenswrapper[4787]: I0127 08:04:35.468697 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: 
\"kubernetes.io/empty-dir/db32e829-ff6c-4e31-bbc4-5291eb8127d3-bundle\") pod \"270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc44q77\" (UID: \"db32e829-ff6c-4e31-bbc4-5291eb8127d3\") " pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc44q77" Jan 27 08:04:35 crc kubenswrapper[4787]: I0127 08:04:35.469671 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/db32e829-ff6c-4e31-bbc4-5291eb8127d3-util\") pod \"270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc44q77\" (UID: \"db32e829-ff6c-4e31-bbc4-5291eb8127d3\") " pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc44q77" Jan 27 08:04:35 crc kubenswrapper[4787]: I0127 08:04:35.497437 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2c7j7\" (UniqueName: \"kubernetes.io/projected/db32e829-ff6c-4e31-bbc4-5291eb8127d3-kube-api-access-2c7j7\") pod \"270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc44q77\" (UID: \"db32e829-ff6c-4e31-bbc4-5291eb8127d3\") " pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc44q77" Jan 27 08:04:35 crc kubenswrapper[4787]: I0127 08:04:35.560131 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc44q77" Jan 27 08:04:35 crc kubenswrapper[4787]: I0127 08:04:35.846362 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc44q77"] Jan 27 08:04:36 crc kubenswrapper[4787]: I0127 08:04:36.683196 4787 generic.go:334] "Generic (PLEG): container finished" podID="db32e829-ff6c-4e31-bbc4-5291eb8127d3" containerID="d99ae58a358703ed015efa605b660c711aca9e1dfb36edd9c93656d0cc45fe0e" exitCode=0 Jan 27 08:04:36 crc kubenswrapper[4787]: I0127 08:04:36.683261 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc44q77" event={"ID":"db32e829-ff6c-4e31-bbc4-5291eb8127d3","Type":"ContainerDied","Data":"d99ae58a358703ed015efa605b660c711aca9e1dfb36edd9c93656d0cc45fe0e"} Jan 27 08:04:36 crc kubenswrapper[4787]: I0127 08:04:36.683307 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc44q77" event={"ID":"db32e829-ff6c-4e31-bbc4-5291eb8127d3","Type":"ContainerStarted","Data":"5b8c02e6b42a52df223a377cafaeace92053f80eac27df2ea6cc17d8538627d4"} Jan 27 08:04:38 crc kubenswrapper[4787]: I0127 08:04:38.696651 4787 generic.go:334] "Generic (PLEG): container finished" podID="db32e829-ff6c-4e31-bbc4-5291eb8127d3" containerID="50ef9eb656d7796d5878d5a68c71d99d16c113ebd6e6510949948c6ef6f8ee11" exitCode=0 Jan 27 08:04:38 crc kubenswrapper[4787]: I0127 08:04:38.696756 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc44q77" event={"ID":"db32e829-ff6c-4e31-bbc4-5291eb8127d3","Type":"ContainerDied","Data":"50ef9eb656d7796d5878d5a68c71d99d16c113ebd6e6510949948c6ef6f8ee11"} Jan 27 08:04:39 crc kubenswrapper[4787]: I0127 08:04:39.714599 4787 generic.go:334] "Generic (PLEG): container finished" podID="db32e829-ff6c-4e31-bbc4-5291eb8127d3" containerID="cbaf0da5be2dfab10bd6788dadf84fb3502ff97babbcd19d4eb4b76d243cb438" exitCode=0 Jan 27 08:04:39 crc kubenswrapper[4787]: I0127 
08:04:39.714686 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc44q77" event={"ID":"db32e829-ff6c-4e31-bbc4-5291eb8127d3","Type":"ContainerDied","Data":"cbaf0da5be2dfab10bd6788dadf84fb3502ff97babbcd19d4eb4b76d243cb438"} Jan 27 08:04:40 crc kubenswrapper[4787]: I0127 08:04:40.974534 4787 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc44q77" Jan 27 08:04:41 crc kubenswrapper[4787]: I0127 08:04:41.053384 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/db32e829-ff6c-4e31-bbc4-5291eb8127d3-util\") pod \"db32e829-ff6c-4e31-bbc4-5291eb8127d3\" (UID: \"db32e829-ff6c-4e31-bbc4-5291eb8127d3\") " Jan 27 08:04:41 crc kubenswrapper[4787]: I0127 08:04:41.053489 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/db32e829-ff6c-4e31-bbc4-5291eb8127d3-bundle\") pod \"db32e829-ff6c-4e31-bbc4-5291eb8127d3\" (UID: \"db32e829-ff6c-4e31-bbc4-5291eb8127d3\") " Jan 27 08:04:41 crc kubenswrapper[4787]: I0127 08:04:41.053607 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2c7j7\" (UniqueName: \"kubernetes.io/projected/db32e829-ff6c-4e31-bbc4-5291eb8127d3-kube-api-access-2c7j7\") pod \"db32e829-ff6c-4e31-bbc4-5291eb8127d3\" (UID: \"db32e829-ff6c-4e31-bbc4-5291eb8127d3\") " Jan 27 08:04:41 crc kubenswrapper[4787]: I0127 08:04:41.054682 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/db32e829-ff6c-4e31-bbc4-5291eb8127d3-bundle" (OuterVolumeSpecName: "bundle") pod "db32e829-ff6c-4e31-bbc4-5291eb8127d3" (UID: "db32e829-ff6c-4e31-bbc4-5291eb8127d3"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 27 08:04:41 crc kubenswrapper[4787]: I0127 08:04:41.075186 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/db32e829-ff6c-4e31-bbc4-5291eb8127d3-util" (OuterVolumeSpecName: "util") pod "db32e829-ff6c-4e31-bbc4-5291eb8127d3" (UID: "db32e829-ff6c-4e31-bbc4-5291eb8127d3"). InnerVolumeSpecName "util". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 27 08:04:41 crc kubenswrapper[4787]: I0127 08:04:41.088810 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/db32e829-ff6c-4e31-bbc4-5291eb8127d3-kube-api-access-2c7j7" (OuterVolumeSpecName: "kube-api-access-2c7j7") pod "db32e829-ff6c-4e31-bbc4-5291eb8127d3" (UID: "db32e829-ff6c-4e31-bbc4-5291eb8127d3"). InnerVolumeSpecName "kube-api-access-2c7j7". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 27 08:04:41 crc kubenswrapper[4787]: I0127 08:04:41.155378 4787 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/db32e829-ff6c-4e31-bbc4-5291eb8127d3-bundle\") on node \"crc\" DevicePath \"\"" Jan 27 08:04:41 crc kubenswrapper[4787]: I0127 08:04:41.155420 4787 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2c7j7\" (UniqueName: \"kubernetes.io/projected/db32e829-ff6c-4e31-bbc4-5291eb8127d3-kube-api-access-2c7j7\") on node \"crc\" DevicePath \"\"" Jan 27 08:04:41 crc kubenswrapper[4787]: I0127 08:04:41.155431 4787 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/db32e829-ff6c-4e31-bbc4-5291eb8127d3-util\") on node \"crc\" DevicePath \"\"" Jan 27 08:04:41 crc kubenswrapper[4787]: I0127 08:04:41.732758 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc44q77" event={"ID":"db32e829-ff6c-4e31-bbc4-5291eb8127d3","Type":"ContainerDied","Data":"5b8c02e6b42a52df223a377cafaeace92053f80eac27df2ea6cc17d8538627d4"} Jan 27 08:04:41 crc kubenswrapper[4787]: I0127 08:04:41.732825 4787 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5b8c02e6b42a52df223a377cafaeace92053f80eac27df2ea6cc17d8538627d4" Jan 27 08:04:41 crc kubenswrapper[4787]: I0127 08:04:41.733545 4787 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc44q77" Jan 27 08:04:50 crc kubenswrapper[4787]: I0127 08:04:50.604970 4787 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/metallb-operator-controller-manager-555548cdf7-hwkjk"] Jan 27 08:04:50 crc kubenswrapper[4787]: E0127 08:04:50.606320 4787 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="db32e829-ff6c-4e31-bbc4-5291eb8127d3" containerName="pull" Jan 27 08:04:50 crc kubenswrapper[4787]: I0127 08:04:50.606343 4787 state_mem.go:107] "Deleted CPUSet assignment" podUID="db32e829-ff6c-4e31-bbc4-5291eb8127d3" containerName="pull" Jan 27 08:04:50 crc kubenswrapper[4787]: E0127 08:04:50.606363 4787 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="db32e829-ff6c-4e31-bbc4-5291eb8127d3" containerName="util" Jan 27 08:04:50 crc kubenswrapper[4787]: I0127 08:04:50.606374 4787 state_mem.go:107] "Deleted CPUSet assignment" podUID="db32e829-ff6c-4e31-bbc4-5291eb8127d3" containerName="util" Jan 27 08:04:50 crc kubenswrapper[4787]: E0127 08:04:50.606389 4787 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="db32e829-ff6c-4e31-bbc4-5291eb8127d3" containerName="extract" Jan 27 08:04:50 crc kubenswrapper[4787]: I0127 08:04:50.606403 4787 state_mem.go:107] "Deleted CPUSet assignment" podUID="db32e829-ff6c-4e31-bbc4-5291eb8127d3" containerName="extract" Jan 27 08:04:50 crc kubenswrapper[4787]: I0127 08:04:50.606587 4787 memory_manager.go:354] "RemoveStaleState removing state" podUID="db32e829-ff6c-4e31-bbc4-5291eb8127d3" containerName="extract" Jan 27 08:04:50 crc kubenswrapper[4787]: I0127 08:04:50.607311 4787 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-555548cdf7-hwkjk" Jan 27 08:04:50 crc kubenswrapper[4787]: I0127 08:04:50.609974 4787 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-webhook-server-cert" Jan 27 08:04:50 crc kubenswrapper[4787]: I0127 08:04:50.611420 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"kube-root-ca.crt" Jan 27 08:04:50 crc kubenswrapper[4787]: I0127 08:04:50.611928 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"openshift-service-ca.crt" Jan 27 08:04:50 crc kubenswrapper[4787]: I0127 08:04:50.612047 4787 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"manager-account-dockercfg-f8lbz" Jan 27 08:04:50 crc kubenswrapper[4787]: I0127 08:04:50.612525 4787 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-controller-manager-service-cert" Jan 27 08:04:50 crc kubenswrapper[4787]: I0127 08:04:50.687690 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-555548cdf7-hwkjk"] Jan 27 08:04:50 crc kubenswrapper[4787]: I0127 08:04:50.689291 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/6d5bf055-d90c-451f-aaf7-19140fcea291-apiservice-cert\") pod \"metallb-operator-controller-manager-555548cdf7-hwkjk\" (UID: \"6d5bf055-d90c-451f-aaf7-19140fcea291\") " pod="metallb-system/metallb-operator-controller-manager-555548cdf7-hwkjk" Jan 27 08:04:50 crc kubenswrapper[4787]: I0127 08:04:50.689506 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/6d5bf055-d90c-451f-aaf7-19140fcea291-webhook-cert\") pod \"metallb-operator-controller-manager-555548cdf7-hwkjk\" (UID: \"6d5bf055-d90c-451f-aaf7-19140fcea291\") " pod="metallb-system/metallb-operator-controller-manager-555548cdf7-hwkjk" Jan 27 08:04:50 crc kubenswrapper[4787]: I0127 08:04:50.689596 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jzsrh\" (UniqueName: \"kubernetes.io/projected/6d5bf055-d90c-451f-aaf7-19140fcea291-kube-api-access-jzsrh\") pod \"metallb-operator-controller-manager-555548cdf7-hwkjk\" (UID: \"6d5bf055-d90c-451f-aaf7-19140fcea291\") " pod="metallb-system/metallb-operator-controller-manager-555548cdf7-hwkjk" Jan 27 08:04:50 crc kubenswrapper[4787]: I0127 08:04:50.791319 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/6d5bf055-d90c-451f-aaf7-19140fcea291-webhook-cert\") pod \"metallb-operator-controller-manager-555548cdf7-hwkjk\" (UID: \"6d5bf055-d90c-451f-aaf7-19140fcea291\") " pod="metallb-system/metallb-operator-controller-manager-555548cdf7-hwkjk" Jan 27 08:04:50 crc kubenswrapper[4787]: I0127 08:04:50.791404 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jzsrh\" (UniqueName: \"kubernetes.io/projected/6d5bf055-d90c-451f-aaf7-19140fcea291-kube-api-access-jzsrh\") pod \"metallb-operator-controller-manager-555548cdf7-hwkjk\" (UID: \"6d5bf055-d90c-451f-aaf7-19140fcea291\") " pod="metallb-system/metallb-operator-controller-manager-555548cdf7-hwkjk" Jan 27 08:04:50 crc kubenswrapper[4787]: I0127 08:04:50.791449 4787 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/6d5bf055-d90c-451f-aaf7-19140fcea291-apiservice-cert\") pod \"metallb-operator-controller-manager-555548cdf7-hwkjk\" (UID: \"6d5bf055-d90c-451f-aaf7-19140fcea291\") " pod="metallb-system/metallb-operator-controller-manager-555548cdf7-hwkjk" Jan 27 08:04:50 crc kubenswrapper[4787]: I0127 08:04:50.800639 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/6d5bf055-d90c-451f-aaf7-19140fcea291-apiservice-cert\") pod \"metallb-operator-controller-manager-555548cdf7-hwkjk\" (UID: \"6d5bf055-d90c-451f-aaf7-19140fcea291\") " pod="metallb-system/metallb-operator-controller-manager-555548cdf7-hwkjk" Jan 27 08:04:50 crc kubenswrapper[4787]: I0127 08:04:50.800730 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/6d5bf055-d90c-451f-aaf7-19140fcea291-webhook-cert\") pod \"metallb-operator-controller-manager-555548cdf7-hwkjk\" (UID: \"6d5bf055-d90c-451f-aaf7-19140fcea291\") " pod="metallb-system/metallb-operator-controller-manager-555548cdf7-hwkjk" Jan 27 08:04:50 crc kubenswrapper[4787]: I0127 08:04:50.816826 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jzsrh\" (UniqueName: \"kubernetes.io/projected/6d5bf055-d90c-451f-aaf7-19140fcea291-kube-api-access-jzsrh\") pod \"metallb-operator-controller-manager-555548cdf7-hwkjk\" (UID: \"6d5bf055-d90c-451f-aaf7-19140fcea291\") " pod="metallb-system/metallb-operator-controller-manager-555548cdf7-hwkjk" Jan 27 08:04:50 crc kubenswrapper[4787]: I0127 08:04:50.842442 4787 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/metallb-operator-webhook-server-78544bb5fb-sxw5f"] Jan 27 08:04:50 crc kubenswrapper[4787]: I0127 08:04:50.843206 4787 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-78544bb5fb-sxw5f" Jan 27 08:04:50 crc kubenswrapper[4787]: I0127 08:04:50.845538 4787 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-webhook-server-service-cert" Jan 27 08:04:50 crc kubenswrapper[4787]: I0127 08:04:50.845636 4787 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-webhook-cert" Jan 27 08:04:50 crc kubenswrapper[4787]: I0127 08:04:50.850439 4787 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-dockercfg-szgvv" Jan 27 08:04:50 crc kubenswrapper[4787]: I0127 08:04:50.874311 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-78544bb5fb-sxw5f"] Jan 27 08:04:50 crc kubenswrapper[4787]: I0127 08:04:50.893124 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5bgm4\" (UniqueName: \"kubernetes.io/projected/a5c49848-4aac-4efa-8acf-7edcee5c2093-kube-api-access-5bgm4\") pod \"metallb-operator-webhook-server-78544bb5fb-sxw5f\" (UID: \"a5c49848-4aac-4efa-8acf-7edcee5c2093\") " pod="metallb-system/metallb-operator-webhook-server-78544bb5fb-sxw5f" Jan 27 08:04:50 crc kubenswrapper[4787]: I0127 08:04:50.893196 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/a5c49848-4aac-4efa-8acf-7edcee5c2093-apiservice-cert\") pod \"metallb-operator-webhook-server-78544bb5fb-sxw5f\" (UID: \"a5c49848-4aac-4efa-8acf-7edcee5c2093\") " pod="metallb-system/metallb-operator-webhook-server-78544bb5fb-sxw5f" Jan 27 08:04:50 crc kubenswrapper[4787]: I0127 08:04:50.893274 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/a5c49848-4aac-4efa-8acf-7edcee5c2093-webhook-cert\") pod \"metallb-operator-webhook-server-78544bb5fb-sxw5f\" (UID: \"a5c49848-4aac-4efa-8acf-7edcee5c2093\") " pod="metallb-system/metallb-operator-webhook-server-78544bb5fb-sxw5f" Jan 27 08:04:50 crc kubenswrapper[4787]: I0127 08:04:50.927863 4787 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-555548cdf7-hwkjk" Jan 27 08:04:50 crc kubenswrapper[4787]: I0127 08:04:50.994610 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5bgm4\" (UniqueName: \"kubernetes.io/projected/a5c49848-4aac-4efa-8acf-7edcee5c2093-kube-api-access-5bgm4\") pod \"metallb-operator-webhook-server-78544bb5fb-sxw5f\" (UID: \"a5c49848-4aac-4efa-8acf-7edcee5c2093\") " pod="metallb-system/metallb-operator-webhook-server-78544bb5fb-sxw5f" Jan 27 08:04:50 crc kubenswrapper[4787]: I0127 08:04:50.994673 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/a5c49848-4aac-4efa-8acf-7edcee5c2093-apiservice-cert\") pod \"metallb-operator-webhook-server-78544bb5fb-sxw5f\" (UID: \"a5c49848-4aac-4efa-8acf-7edcee5c2093\") " pod="metallb-system/metallb-operator-webhook-server-78544bb5fb-sxw5f" Jan 27 08:04:50 crc kubenswrapper[4787]: I0127 08:04:50.994746 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/a5c49848-4aac-4efa-8acf-7edcee5c2093-webhook-cert\") pod \"metallb-operator-webhook-server-78544bb5fb-sxw5f\" (UID: \"a5c49848-4aac-4efa-8acf-7edcee5c2093\") " pod="metallb-system/metallb-operator-webhook-server-78544bb5fb-sxw5f" Jan 27 08:04:51 crc kubenswrapper[4787]: I0127 08:04:51.001224 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/a5c49848-4aac-4efa-8acf-7edcee5c2093-webhook-cert\") pod \"metallb-operator-webhook-server-78544bb5fb-sxw5f\" (UID: \"a5c49848-4aac-4efa-8acf-7edcee5c2093\") " pod="metallb-system/metallb-operator-webhook-server-78544bb5fb-sxw5f" Jan 27 08:04:51 crc kubenswrapper[4787]: I0127 08:04:51.002749 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/a5c49848-4aac-4efa-8acf-7edcee5c2093-apiservice-cert\") pod \"metallb-operator-webhook-server-78544bb5fb-sxw5f\" (UID: \"a5c49848-4aac-4efa-8acf-7edcee5c2093\") " pod="metallb-system/metallb-operator-webhook-server-78544bb5fb-sxw5f" Jan 27 08:04:51 crc kubenswrapper[4787]: I0127 08:04:51.014541 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5bgm4\" (UniqueName: \"kubernetes.io/projected/a5c49848-4aac-4efa-8acf-7edcee5c2093-kube-api-access-5bgm4\") pod \"metallb-operator-webhook-server-78544bb5fb-sxw5f\" (UID: \"a5c49848-4aac-4efa-8acf-7edcee5c2093\") " pod="metallb-system/metallb-operator-webhook-server-78544bb5fb-sxw5f" Jan 27 08:04:51 crc kubenswrapper[4787]: I0127 08:04:51.165262 4787 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-78544bb5fb-sxw5f" Jan 27 08:04:51 crc kubenswrapper[4787]: I0127 08:04:51.223289 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-555548cdf7-hwkjk"] Jan 27 08:04:51 crc kubenswrapper[4787]: I0127 08:04:51.432931 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-78544bb5fb-sxw5f"] Jan 27 08:04:51 crc kubenswrapper[4787]: W0127 08:04:51.447541 4787 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda5c49848_4aac_4efa_8acf_7edcee5c2093.slice/crio-b827e913033f8581661468b8e2587c39c3961653cb81c09184b39cc7b6bfb075 WatchSource:0}: Error finding container b827e913033f8581661468b8e2587c39c3961653cb81c09184b39cc7b6bfb075: Status 404 returned error can't find the container with id b827e913033f8581661468b8e2587c39c3961653cb81c09184b39cc7b6bfb075 Jan 27 08:04:51 crc kubenswrapper[4787]: I0127 08:04:51.796792 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-webhook-server-78544bb5fb-sxw5f" event={"ID":"a5c49848-4aac-4efa-8acf-7edcee5c2093","Type":"ContainerStarted","Data":"b827e913033f8581661468b8e2587c39c3961653cb81c09184b39cc7b6bfb075"} Jan 27 08:04:51 crc kubenswrapper[4787]: I0127 08:04:51.797985 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-555548cdf7-hwkjk" event={"ID":"6d5bf055-d90c-451f-aaf7-19140fcea291","Type":"ContainerStarted","Data":"4d9450fdd357ae7b95b84cb88f28347313189725148bd5ef1fd73de1e380aa98"} Jan 27 08:04:52 crc kubenswrapper[4787]: I0127 08:04:52.823421 4787 patch_prober.go:28] interesting pod/machine-config-daemon-q4fh5 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 27 08:04:52 crc kubenswrapper[4787]: I0127 08:04:52.823519 4787 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-q4fh5" podUID="f051e184-acac-47cf-9e04-9df648288715" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 27 08:04:56 crc kubenswrapper[4787]: I0127 08:04:56.848491 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-webhook-server-78544bb5fb-sxw5f" event={"ID":"a5c49848-4aac-4efa-8acf-7edcee5c2093","Type":"ContainerStarted","Data":"926f125b1130617957d8c188fc86ae5816d96f3b1b4a25546fba0c7933b77bc1"} Jan 27 08:04:56 crc kubenswrapper[4787]: I0127 08:04:56.849115 4787 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/metallb-operator-webhook-server-78544bb5fb-sxw5f" Jan 27 08:04:56 crc kubenswrapper[4787]: I0127 08:04:56.850007 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-555548cdf7-hwkjk" event={"ID":"6d5bf055-d90c-451f-aaf7-19140fcea291","Type":"ContainerStarted","Data":"1811eeec1b01394bf7d9c8c567014e98d1bea80df94e8621f7e3690c74693473"} Jan 27 08:04:56 crc kubenswrapper[4787]: I0127 08:04:56.850271 4787 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/metallb-operator-controller-manager-555548cdf7-hwkjk" Jan 27 
08:04:56 crc kubenswrapper[4787]: I0127 08:04:56.874802 4787 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/metallb-operator-webhook-server-78544bb5fb-sxw5f" podStartSLOduration=1.997817036 podStartE2EDuration="6.874770222s" podCreationTimestamp="2026-01-27 08:04:50 +0000 UTC" firstStartedPulling="2026-01-27 08:04:51.450393707 +0000 UTC m=+797.102749199" lastFinishedPulling="2026-01-27 08:04:56.327346873 +0000 UTC m=+801.979702385" observedRunningTime="2026-01-27 08:04:56.872088053 +0000 UTC m=+802.524443555" watchObservedRunningTime="2026-01-27 08:04:56.874770222 +0000 UTC m=+802.527125714" Jan 27 08:04:56 crc kubenswrapper[4787]: I0127 08:04:56.899104 4787 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/metallb-operator-controller-manager-555548cdf7-hwkjk" podStartSLOduration=1.878779843 podStartE2EDuration="6.899078348s" podCreationTimestamp="2026-01-27 08:04:50 +0000 UTC" firstStartedPulling="2026-01-27 08:04:51.272444507 +0000 UTC m=+796.924799999" lastFinishedPulling="2026-01-27 08:04:56.292743012 +0000 UTC m=+801.945098504" observedRunningTime="2026-01-27 08:04:56.895330525 +0000 UTC m=+802.547686047" watchObservedRunningTime="2026-01-27 08:04:56.899078348 +0000 UTC m=+802.551433880" Jan 27 08:05:11 crc kubenswrapper[4787]: I0127 08:05:11.170617 4787 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/metallb-operator-webhook-server-78544bb5fb-sxw5f" Jan 27 08:05:22 crc kubenswrapper[4787]: I0127 08:05:22.823270 4787 patch_prober.go:28] interesting pod/machine-config-daemon-q4fh5 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 27 08:05:22 crc kubenswrapper[4787]: I0127 08:05:22.825630 4787 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-q4fh5" podUID="f051e184-acac-47cf-9e04-9df648288715" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 27 08:05:30 crc kubenswrapper[4787]: I0127 08:05:30.931956 4787 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/metallb-operator-controller-manager-555548cdf7-hwkjk" Jan 27 08:05:31 crc kubenswrapper[4787]: I0127 08:05:31.747777 4787 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/frr-k8s-pp6fl"] Jan 27 08:05:31 crc kubenswrapper[4787]: I0127 08:05:31.750881 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-pp6fl" Jan 27 08:05:31 crc kubenswrapper[4787]: I0127 08:05:31.753442 4787 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-daemon-dockercfg-5tpq8" Jan 27 08:05:31 crc kubenswrapper[4787]: I0127 08:05:31.757916 4787 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-certs-secret" Jan 27 08:05:31 crc kubenswrapper[4787]: I0127 08:05:31.759215 4787 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/frr-k8s-webhook-server-7df86c4f6c-vfl8q"] Jan 27 08:05:31 crc kubenswrapper[4787]: I0127 08:05:31.760492 4787 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-vfl8q" Jan 27 08:05:31 crc kubenswrapper[4787]: I0127 08:05:31.762531 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"frr-startup" Jan 27 08:05:31 crc kubenswrapper[4787]: I0127 08:05:31.766086 4787 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-webhook-server-cert" Jan 27 08:05:31 crc kubenswrapper[4787]: I0127 08:05:31.774339 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/frr-k8s-webhook-server-7df86c4f6c-vfl8q"] Jan 27 08:05:31 crc kubenswrapper[4787]: I0127 08:05:31.847007 4787 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/speaker-t2fkx"] Jan 27 08:05:31 crc kubenswrapper[4787]: I0127 08:05:31.847950 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/speaker-t2fkx" Jan 27 08:05:31 crc kubenswrapper[4787]: I0127 08:05:31.850128 4787 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-memberlist" Jan 27 08:05:31 crc kubenswrapper[4787]: I0127 08:05:31.850700 4787 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-certs-secret" Jan 27 08:05:31 crc kubenswrapper[4787]: I0127 08:05:31.851002 4787 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-dockercfg-w76xj" Jan 27 08:05:31 crc kubenswrapper[4787]: I0127 08:05:31.851281 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"metallb-excludel2" Jan 27 08:05:31 crc kubenswrapper[4787]: I0127 08:05:31.868649 4787 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/controller-6968d8fdc4-k24s7"] Jan 27 08:05:31 crc kubenswrapper[4787]: I0127 08:05:31.870007 4787 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/controller-6968d8fdc4-k24s7" Jan 27 08:05:31 crc kubenswrapper[4787]: I0127 08:05:31.872864 4787 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-certs-secret" Jan 27 08:05:31 crc kubenswrapper[4787]: I0127 08:05:31.888214 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-6968d8fdc4-k24s7"] Jan 27 08:05:31 crc kubenswrapper[4787]: I0127 08:05:31.934526 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kbl5b\" (UniqueName: \"kubernetes.io/projected/49a597ca-3bfc-4377-a49b-19337d609659-kube-api-access-kbl5b\") pod \"frr-k8s-pp6fl\" (UID: \"49a597ca-3bfc-4377-a49b-19337d609659\") " pod="metallb-system/frr-k8s-pp6fl" Jan 27 08:05:31 crc kubenswrapper[4787]: I0127 08:05:31.935318 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/49a597ca-3bfc-4377-a49b-19337d609659-reloader\") pod \"frr-k8s-pp6fl\" (UID: \"49a597ca-3bfc-4377-a49b-19337d609659\") " pod="metallb-system/frr-k8s-pp6fl" Jan 27 08:05:31 crc kubenswrapper[4787]: I0127 08:05:31.935394 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/49a597ca-3bfc-4377-a49b-19337d609659-metrics\") pod \"frr-k8s-pp6fl\" (UID: \"49a597ca-3bfc-4377-a49b-19337d609659\") " pod="metallb-system/frr-k8s-pp6fl" Jan 27 08:05:31 crc kubenswrapper[4787]: I0127 08:05:31.935439 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/49a597ca-3bfc-4377-a49b-19337d609659-frr-conf\") pod \"frr-k8s-pp6fl\" (UID: \"49a597ca-3bfc-4377-a49b-19337d609659\") " pod="metallb-system/frr-k8s-pp6fl" Jan 27 08:05:31 crc kubenswrapper[4787]: I0127 08:05:31.935482 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/49a597ca-3bfc-4377-a49b-19337d609659-frr-startup\") pod \"frr-k8s-pp6fl\" (UID: \"49a597ca-3bfc-4377-a49b-19337d609659\") " pod="metallb-system/frr-k8s-pp6fl" Jan 27 08:05:31 crc kubenswrapper[4787]: I0127 08:05:31.935505 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s8s5m\" (UniqueName: \"kubernetes.io/projected/360229c9-082c-4e85-8800-c5f8717fd8c4-kube-api-access-s8s5m\") pod \"frr-k8s-webhook-server-7df86c4f6c-vfl8q\" (UID: \"360229c9-082c-4e85-8800-c5f8717fd8c4\") " pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-vfl8q" Jan 27 08:05:31 crc kubenswrapper[4787]: I0127 08:05:31.935564 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/360229c9-082c-4e85-8800-c5f8717fd8c4-cert\") pod \"frr-k8s-webhook-server-7df86c4f6c-vfl8q\" (UID: \"360229c9-082c-4e85-8800-c5f8717fd8c4\") " pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-vfl8q" Jan 27 08:05:31 crc kubenswrapper[4787]: I0127 08:05:31.935591 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/49a597ca-3bfc-4377-a49b-19337d609659-frr-sockets\") pod \"frr-k8s-pp6fl\" (UID: \"49a597ca-3bfc-4377-a49b-19337d609659\") " pod="metallb-system/frr-k8s-pp6fl" Jan 27 08:05:31 
crc kubenswrapper[4787]: I0127 08:05:31.935605 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/49a597ca-3bfc-4377-a49b-19337d609659-metrics-certs\") pod \"frr-k8s-pp6fl\" (UID: \"49a597ca-3bfc-4377-a49b-19337d609659\") " pod="metallb-system/frr-k8s-pp6fl" Jan 27 08:05:32 crc kubenswrapper[4787]: I0127 08:05:32.037693 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/49a597ca-3bfc-4377-a49b-19337d609659-frr-conf\") pod \"frr-k8s-pp6fl\" (UID: \"49a597ca-3bfc-4377-a49b-19337d609659\") " pod="metallb-system/frr-k8s-pp6fl" Jan 27 08:05:32 crc kubenswrapper[4787]: I0127 08:05:32.037764 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/f5e61cdf-e660-42e7-b43c-42afb781223b-metallb-excludel2\") pod \"speaker-t2fkx\" (UID: \"f5e61cdf-e660-42e7-b43c-42afb781223b\") " pod="metallb-system/speaker-t2fkx" Jan 27 08:05:32 crc kubenswrapper[4787]: I0127 08:05:32.037796 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t2glj\" (UniqueName: \"kubernetes.io/projected/f9b9a161-da5c-416b-9713-5fb85ee005fb-kube-api-access-t2glj\") pod \"controller-6968d8fdc4-k24s7\" (UID: \"f9b9a161-da5c-416b-9713-5fb85ee005fb\") " pod="metallb-system/controller-6968d8fdc4-k24s7" Jan 27 08:05:32 crc kubenswrapper[4787]: I0127 08:05:32.037822 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/49a597ca-3bfc-4377-a49b-19337d609659-frr-startup\") pod \"frr-k8s-pp6fl\" (UID: \"49a597ca-3bfc-4377-a49b-19337d609659\") " pod="metallb-system/frr-k8s-pp6fl" Jan 27 08:05:32 crc kubenswrapper[4787]: I0127 08:05:32.037844 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s8s5m\" (UniqueName: \"kubernetes.io/projected/360229c9-082c-4e85-8800-c5f8717fd8c4-kube-api-access-s8s5m\") pod \"frr-k8s-webhook-server-7df86c4f6c-vfl8q\" (UID: \"360229c9-082c-4e85-8800-c5f8717fd8c4\") " pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-vfl8q" Jan 27 08:05:32 crc kubenswrapper[4787]: I0127 08:05:32.037868 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/f9b9a161-da5c-416b-9713-5fb85ee005fb-metrics-certs\") pod \"controller-6968d8fdc4-k24s7\" (UID: \"f9b9a161-da5c-416b-9713-5fb85ee005fb\") " pod="metallb-system/controller-6968d8fdc4-k24s7" Jan 27 08:05:32 crc kubenswrapper[4787]: I0127 08:05:32.038195 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/49a597ca-3bfc-4377-a49b-19337d609659-frr-conf\") pod \"frr-k8s-pp6fl\" (UID: \"49a597ca-3bfc-4377-a49b-19337d609659\") " pod="metallb-system/frr-k8s-pp6fl" Jan 27 08:05:32 crc kubenswrapper[4787]: I0127 08:05:32.038192 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/360229c9-082c-4e85-8800-c5f8717fd8c4-cert\") pod \"frr-k8s-webhook-server-7df86c4f6c-vfl8q\" (UID: \"360229c9-082c-4e85-8800-c5f8717fd8c4\") " pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-vfl8q" Jan 27 08:05:32 crc kubenswrapper[4787]: I0127 08:05:32.038308 4787 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/49a597ca-3bfc-4377-a49b-19337d609659-frr-sockets\") pod \"frr-k8s-pp6fl\" (UID: \"49a597ca-3bfc-4377-a49b-19337d609659\") " pod="metallb-system/frr-k8s-pp6fl" Jan 27 08:05:32 crc kubenswrapper[4787]: I0127 08:05:32.038329 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/49a597ca-3bfc-4377-a49b-19337d609659-metrics-certs\") pod \"frr-k8s-pp6fl\" (UID: \"49a597ca-3bfc-4377-a49b-19337d609659\") " pod="metallb-system/frr-k8s-pp6fl" Jan 27 08:05:32 crc kubenswrapper[4787]: I0127 08:05:32.038369 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kbl5b\" (UniqueName: \"kubernetes.io/projected/49a597ca-3bfc-4377-a49b-19337d609659-kube-api-access-kbl5b\") pod \"frr-k8s-pp6fl\" (UID: \"49a597ca-3bfc-4377-a49b-19337d609659\") " pod="metallb-system/frr-k8s-pp6fl" Jan 27 08:05:32 crc kubenswrapper[4787]: I0127 08:05:32.038399 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/f5e61cdf-e660-42e7-b43c-42afb781223b-metrics-certs\") pod \"speaker-t2fkx\" (UID: \"f5e61cdf-e660-42e7-b43c-42afb781223b\") " pod="metallb-system/speaker-t2fkx" Jan 27 08:05:32 crc kubenswrapper[4787]: I0127 08:05:32.038435 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/f9b9a161-da5c-416b-9713-5fb85ee005fb-cert\") pod \"controller-6968d8fdc4-k24s7\" (UID: \"f9b9a161-da5c-416b-9713-5fb85ee005fb\") " pod="metallb-system/controller-6968d8fdc4-k24s7" Jan 27 08:05:32 crc kubenswrapper[4787]: I0127 08:05:32.038467 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/49a597ca-3bfc-4377-a49b-19337d609659-reloader\") pod \"frr-k8s-pp6fl\" (UID: \"49a597ca-3bfc-4377-a49b-19337d609659\") " pod="metallb-system/frr-k8s-pp6fl" Jan 27 08:05:32 crc kubenswrapper[4787]: I0127 08:05:32.038501 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8bkqh\" (UniqueName: \"kubernetes.io/projected/f5e61cdf-e660-42e7-b43c-42afb781223b-kube-api-access-8bkqh\") pod \"speaker-t2fkx\" (UID: \"f5e61cdf-e660-42e7-b43c-42afb781223b\") " pod="metallb-system/speaker-t2fkx" Jan 27 08:05:32 crc kubenswrapper[4787]: I0127 08:05:32.038524 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/49a597ca-3bfc-4377-a49b-19337d609659-metrics\") pod \"frr-k8s-pp6fl\" (UID: \"49a597ca-3bfc-4377-a49b-19337d609659\") " pod="metallb-system/frr-k8s-pp6fl" Jan 27 08:05:32 crc kubenswrapper[4787]: I0127 08:05:32.038544 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/f5e61cdf-e660-42e7-b43c-42afb781223b-memberlist\") pod \"speaker-t2fkx\" (UID: \"f5e61cdf-e660-42e7-b43c-42afb781223b\") " pod="metallb-system/speaker-t2fkx" Jan 27 08:05:32 crc kubenswrapper[4787]: I0127 08:05:32.038787 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/49a597ca-3bfc-4377-a49b-19337d609659-frr-startup\") pod \"frr-k8s-pp6fl\" (UID: \"49a597ca-3bfc-4377-a49b-19337d609659\") " 
pod="metallb-system/frr-k8s-pp6fl" Jan 27 08:05:32 crc kubenswrapper[4787]: I0127 08:05:32.038798 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/49a597ca-3bfc-4377-a49b-19337d609659-frr-sockets\") pod \"frr-k8s-pp6fl\" (UID: \"49a597ca-3bfc-4377-a49b-19337d609659\") " pod="metallb-system/frr-k8s-pp6fl" Jan 27 08:05:32 crc kubenswrapper[4787]: I0127 08:05:32.039036 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/49a597ca-3bfc-4377-a49b-19337d609659-reloader\") pod \"frr-k8s-pp6fl\" (UID: \"49a597ca-3bfc-4377-a49b-19337d609659\") " pod="metallb-system/frr-k8s-pp6fl" Jan 27 08:05:32 crc kubenswrapper[4787]: I0127 08:05:32.039183 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/49a597ca-3bfc-4377-a49b-19337d609659-metrics\") pod \"frr-k8s-pp6fl\" (UID: \"49a597ca-3bfc-4377-a49b-19337d609659\") " pod="metallb-system/frr-k8s-pp6fl" Jan 27 08:05:32 crc kubenswrapper[4787]: I0127 08:05:32.044889 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/49a597ca-3bfc-4377-a49b-19337d609659-metrics-certs\") pod \"frr-k8s-pp6fl\" (UID: \"49a597ca-3bfc-4377-a49b-19337d609659\") " pod="metallb-system/frr-k8s-pp6fl" Jan 27 08:05:32 crc kubenswrapper[4787]: I0127 08:05:32.045055 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/360229c9-082c-4e85-8800-c5f8717fd8c4-cert\") pod \"frr-k8s-webhook-server-7df86c4f6c-vfl8q\" (UID: \"360229c9-082c-4e85-8800-c5f8717fd8c4\") " pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-vfl8q" Jan 27 08:05:32 crc kubenswrapper[4787]: I0127 08:05:32.056571 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kbl5b\" (UniqueName: \"kubernetes.io/projected/49a597ca-3bfc-4377-a49b-19337d609659-kube-api-access-kbl5b\") pod \"frr-k8s-pp6fl\" (UID: \"49a597ca-3bfc-4377-a49b-19337d609659\") " pod="metallb-system/frr-k8s-pp6fl" Jan 27 08:05:32 crc kubenswrapper[4787]: I0127 08:05:32.071391 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s8s5m\" (UniqueName: \"kubernetes.io/projected/360229c9-082c-4e85-8800-c5f8717fd8c4-kube-api-access-s8s5m\") pod \"frr-k8s-webhook-server-7df86c4f6c-vfl8q\" (UID: \"360229c9-082c-4e85-8800-c5f8717fd8c4\") " pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-vfl8q" Jan 27 08:05:32 crc kubenswrapper[4787]: I0127 08:05:32.075777 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-pp6fl" Jan 27 08:05:32 crc kubenswrapper[4787]: I0127 08:05:32.086136 4787 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-vfl8q" Jan 27 08:05:32 crc kubenswrapper[4787]: I0127 08:05:32.139009 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/f5e61cdf-e660-42e7-b43c-42afb781223b-metrics-certs\") pod \"speaker-t2fkx\" (UID: \"f5e61cdf-e660-42e7-b43c-42afb781223b\") " pod="metallb-system/speaker-t2fkx" Jan 27 08:05:32 crc kubenswrapper[4787]: I0127 08:05:32.139073 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/f9b9a161-da5c-416b-9713-5fb85ee005fb-cert\") pod \"controller-6968d8fdc4-k24s7\" (UID: \"f9b9a161-da5c-416b-9713-5fb85ee005fb\") " pod="metallb-system/controller-6968d8fdc4-k24s7" Jan 27 08:05:32 crc kubenswrapper[4787]: I0127 08:05:32.139110 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8bkqh\" (UniqueName: \"kubernetes.io/projected/f5e61cdf-e660-42e7-b43c-42afb781223b-kube-api-access-8bkqh\") pod \"speaker-t2fkx\" (UID: \"f5e61cdf-e660-42e7-b43c-42afb781223b\") " pod="metallb-system/speaker-t2fkx" Jan 27 08:05:32 crc kubenswrapper[4787]: I0127 08:05:32.139127 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/f5e61cdf-e660-42e7-b43c-42afb781223b-memberlist\") pod \"speaker-t2fkx\" (UID: \"f5e61cdf-e660-42e7-b43c-42afb781223b\") " pod="metallb-system/speaker-t2fkx" Jan 27 08:05:32 crc kubenswrapper[4787]: I0127 08:05:32.139165 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/f5e61cdf-e660-42e7-b43c-42afb781223b-metallb-excludel2\") pod \"speaker-t2fkx\" (UID: \"f5e61cdf-e660-42e7-b43c-42afb781223b\") " pod="metallb-system/speaker-t2fkx" Jan 27 08:05:32 crc kubenswrapper[4787]: I0127 08:05:32.139186 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t2glj\" (UniqueName: \"kubernetes.io/projected/f9b9a161-da5c-416b-9713-5fb85ee005fb-kube-api-access-t2glj\") pod \"controller-6968d8fdc4-k24s7\" (UID: \"f9b9a161-da5c-416b-9713-5fb85ee005fb\") " pod="metallb-system/controller-6968d8fdc4-k24s7" Jan 27 08:05:32 crc kubenswrapper[4787]: I0127 08:05:32.139206 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/f9b9a161-da5c-416b-9713-5fb85ee005fb-metrics-certs\") pod \"controller-6968d8fdc4-k24s7\" (UID: \"f9b9a161-da5c-416b-9713-5fb85ee005fb\") " pod="metallb-system/controller-6968d8fdc4-k24s7" Jan 27 08:05:32 crc kubenswrapper[4787]: E0127 08:05:32.139585 4787 secret.go:188] Couldn't get secret metallb-system/metallb-memberlist: secret "metallb-memberlist" not found Jan 27 08:05:32 crc kubenswrapper[4787]: E0127 08:05:32.139699 4787 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/f5e61cdf-e660-42e7-b43c-42afb781223b-memberlist podName:f5e61cdf-e660-42e7-b43c-42afb781223b nodeName:}" failed. No retries permitted until 2026-01-27 08:05:32.639666676 +0000 UTC m=+838.292022168 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "memberlist" (UniqueName: "kubernetes.io/secret/f5e61cdf-e660-42e7-b43c-42afb781223b-memberlist") pod "speaker-t2fkx" (UID: "f5e61cdf-e660-42e7-b43c-42afb781223b") : secret "metallb-memberlist" not found Jan 27 08:05:32 crc kubenswrapper[4787]: I0127 08:05:32.140936 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/f5e61cdf-e660-42e7-b43c-42afb781223b-metallb-excludel2\") pod \"speaker-t2fkx\" (UID: \"f5e61cdf-e660-42e7-b43c-42afb781223b\") " pod="metallb-system/speaker-t2fkx" Jan 27 08:05:32 crc kubenswrapper[4787]: I0127 08:05:32.144613 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/f5e61cdf-e660-42e7-b43c-42afb781223b-metrics-certs\") pod \"speaker-t2fkx\" (UID: \"f5e61cdf-e660-42e7-b43c-42afb781223b\") " pod="metallb-system/speaker-t2fkx" Jan 27 08:05:32 crc kubenswrapper[4787]: I0127 08:05:32.144719 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/f9b9a161-da5c-416b-9713-5fb85ee005fb-cert\") pod \"controller-6968d8fdc4-k24s7\" (UID: \"f9b9a161-da5c-416b-9713-5fb85ee005fb\") " pod="metallb-system/controller-6968d8fdc4-k24s7" Jan 27 08:05:32 crc kubenswrapper[4787]: I0127 08:05:32.147603 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/f9b9a161-da5c-416b-9713-5fb85ee005fb-metrics-certs\") pod \"controller-6968d8fdc4-k24s7\" (UID: \"f9b9a161-da5c-416b-9713-5fb85ee005fb\") " pod="metallb-system/controller-6968d8fdc4-k24s7" Jan 27 08:05:32 crc kubenswrapper[4787]: I0127 08:05:32.162669 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t2glj\" (UniqueName: \"kubernetes.io/projected/f9b9a161-da5c-416b-9713-5fb85ee005fb-kube-api-access-t2glj\") pod \"controller-6968d8fdc4-k24s7\" (UID: \"f9b9a161-da5c-416b-9713-5fb85ee005fb\") " pod="metallb-system/controller-6968d8fdc4-k24s7" Jan 27 08:05:32 crc kubenswrapper[4787]: I0127 08:05:32.165785 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8bkqh\" (UniqueName: \"kubernetes.io/projected/f5e61cdf-e660-42e7-b43c-42afb781223b-kube-api-access-8bkqh\") pod \"speaker-t2fkx\" (UID: \"f5e61cdf-e660-42e7-b43c-42afb781223b\") " pod="metallb-system/speaker-t2fkx" Jan 27 08:05:32 crc kubenswrapper[4787]: I0127 08:05:32.188905 4787 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/controller-6968d8fdc4-k24s7" Jan 27 08:05:32 crc kubenswrapper[4787]: I0127 08:05:32.360143 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/frr-k8s-webhook-server-7df86c4f6c-vfl8q"] Jan 27 08:05:32 crc kubenswrapper[4787]: W0127 08:05:32.369111 4787 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod360229c9_082c_4e85_8800_c5f8717fd8c4.slice/crio-2f4a3a28e0cbef9d370f5a2e53f7b82cc2171d4ebff79a166b4e009c2840f49b WatchSource:0}: Error finding container 2f4a3a28e0cbef9d370f5a2e53f7b82cc2171d4ebff79a166b4e009c2840f49b: Status 404 returned error can't find the container with id 2f4a3a28e0cbef9d370f5a2e53f7b82cc2171d4ebff79a166b4e009c2840f49b Jan 27 08:05:32 crc kubenswrapper[4787]: I0127 08:05:32.643731 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-6968d8fdc4-k24s7"] Jan 27 08:05:32 crc kubenswrapper[4787]: I0127 08:05:32.646663 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/f5e61cdf-e660-42e7-b43c-42afb781223b-memberlist\") pod \"speaker-t2fkx\" (UID: \"f5e61cdf-e660-42e7-b43c-42afb781223b\") " pod="metallb-system/speaker-t2fkx" Jan 27 08:05:32 crc kubenswrapper[4787]: E0127 08:05:32.646879 4787 secret.go:188] Couldn't get secret metallb-system/metallb-memberlist: secret "metallb-memberlist" not found Jan 27 08:05:32 crc kubenswrapper[4787]: E0127 08:05:32.646999 4787 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/f5e61cdf-e660-42e7-b43c-42afb781223b-memberlist podName:f5e61cdf-e660-42e7-b43c-42afb781223b nodeName:}" failed. No retries permitted until 2026-01-27 08:05:33.64697157 +0000 UTC m=+839.299327062 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "memberlist" (UniqueName: "kubernetes.io/secret/f5e61cdf-e660-42e7-b43c-42afb781223b-memberlist") pod "speaker-t2fkx" (UID: "f5e61cdf-e660-42e7-b43c-42afb781223b") : secret "metallb-memberlist" not found Jan 27 08:05:32 crc kubenswrapper[4787]: W0127 08:05:32.649042 4787 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf9b9a161_da5c_416b_9713_5fb85ee005fb.slice/crio-91c4c8bb93f5387567a51e738a030b0fa6a4c1d18bb5308f214e57fd7c2b7992 WatchSource:0}: Error finding container 91c4c8bb93f5387567a51e738a030b0fa6a4c1d18bb5308f214e57fd7c2b7992: Status 404 returned error can't find the container with id 91c4c8bb93f5387567a51e738a030b0fa6a4c1d18bb5308f214e57fd7c2b7992 Jan 27 08:05:33 crc kubenswrapper[4787]: I0127 08:05:33.106384 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-pp6fl" event={"ID":"49a597ca-3bfc-4377-a49b-19337d609659","Type":"ContainerStarted","Data":"bd2db11db06541ae69844001086e24fe848a05faf780baf0f5d0197556aebea1"} Jan 27 08:05:33 crc kubenswrapper[4787]: I0127 08:05:33.107920 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-vfl8q" event={"ID":"360229c9-082c-4e85-8800-c5f8717fd8c4","Type":"ContainerStarted","Data":"2f4a3a28e0cbef9d370f5a2e53f7b82cc2171d4ebff79a166b4e009c2840f49b"} Jan 27 08:05:33 crc kubenswrapper[4787]: I0127 08:05:33.109823 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-6968d8fdc4-k24s7" event={"ID":"f9b9a161-da5c-416b-9713-5fb85ee005fb","Type":"ContainerStarted","Data":"4cfde7e67fee9e3da12be81b7c690a087441866556cb0d0fef2dcddb6eb03d09"} Jan 27 08:05:33 crc kubenswrapper[4787]: I0127 08:05:33.109881 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-6968d8fdc4-k24s7" event={"ID":"f9b9a161-da5c-416b-9713-5fb85ee005fb","Type":"ContainerStarted","Data":"6a62a114aff53887f3b5131bff051bafbb25e0eeb7c4055e403d850d42d65bfc"} Jan 27 08:05:33 crc kubenswrapper[4787]: I0127 08:05:33.109894 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-6968d8fdc4-k24s7" event={"ID":"f9b9a161-da5c-416b-9713-5fb85ee005fb","Type":"ContainerStarted","Data":"91c4c8bb93f5387567a51e738a030b0fa6a4c1d18bb5308f214e57fd7c2b7992"} Jan 27 08:05:33 crc kubenswrapper[4787]: I0127 08:05:33.109985 4787 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/controller-6968d8fdc4-k24s7" Jan 27 08:05:33 crc kubenswrapper[4787]: I0127 08:05:33.132522 4787 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/controller-6968d8fdc4-k24s7" podStartSLOduration=2.132498605 podStartE2EDuration="2.132498605s" podCreationTimestamp="2026-01-27 08:05:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-27 08:05:33.128951898 +0000 UTC m=+838.781307420" watchObservedRunningTime="2026-01-27 08:05:33.132498605 +0000 UTC m=+838.784854097" Jan 27 08:05:33 crc kubenswrapper[4787]: I0127 08:05:33.660179 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/f5e61cdf-e660-42e7-b43c-42afb781223b-memberlist\") pod \"speaker-t2fkx\" (UID: \"f5e61cdf-e660-42e7-b43c-42afb781223b\") " pod="metallb-system/speaker-t2fkx" Jan 27 08:05:33 crc kubenswrapper[4787]: I0127 08:05:33.669889 4787 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/f5e61cdf-e660-42e7-b43c-42afb781223b-memberlist\") pod \"speaker-t2fkx\" (UID: \"f5e61cdf-e660-42e7-b43c-42afb781223b\") " pod="metallb-system/speaker-t2fkx" Jan 27 08:05:33 crc kubenswrapper[4787]: I0127 08:05:33.963307 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/speaker-t2fkx" Jan 27 08:05:34 crc kubenswrapper[4787]: I0127 08:05:34.118653 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-t2fkx" event={"ID":"f5e61cdf-e660-42e7-b43c-42afb781223b","Type":"ContainerStarted","Data":"06b472d51d3741a222a2d3df7e9c77bbdf3f613ed002f10c21cef0762fb26979"} Jan 27 08:05:35 crc kubenswrapper[4787]: I0127 08:05:35.147790 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-t2fkx" event={"ID":"f5e61cdf-e660-42e7-b43c-42afb781223b","Type":"ContainerStarted","Data":"9e5c7826cef270c18da0240a2bdd27416720a27547a6aed98ca67b82f0b11db0"} Jan 27 08:05:35 crc kubenswrapper[4787]: I0127 08:05:35.148143 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-t2fkx" event={"ID":"f5e61cdf-e660-42e7-b43c-42afb781223b","Type":"ContainerStarted","Data":"ca4605ed486e1fb45d91ab9669c2f91299eb61571ccdf48f1099ca84ea11cf56"} Jan 27 08:05:35 crc kubenswrapper[4787]: I0127 08:05:35.148173 4787 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/speaker-t2fkx" Jan 27 08:05:35 crc kubenswrapper[4787]: I0127 08:05:35.202814 4787 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/speaker-t2fkx" podStartSLOduration=4.202787278 podStartE2EDuration="4.202787278s" podCreationTimestamp="2026-01-27 08:05:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-27 08:05:35.202237256 +0000 UTC m=+840.854592758" watchObservedRunningTime="2026-01-27 08:05:35.202787278 +0000 UTC m=+840.855142770" Jan 27 08:05:41 crc kubenswrapper[4787]: I0127 08:05:41.200134 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-vfl8q" event={"ID":"360229c9-082c-4e85-8800-c5f8717fd8c4","Type":"ContainerStarted","Data":"e2a9ed1e09616e004f2f92ef2b88e97eaa992d3d2c9c461ef3e94a64699eda47"} Jan 27 08:05:41 crc kubenswrapper[4787]: I0127 08:05:41.200884 4787 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-vfl8q" Jan 27 08:05:41 crc kubenswrapper[4787]: I0127 08:05:41.202517 4787 generic.go:334] "Generic (PLEG): container finished" podID="49a597ca-3bfc-4377-a49b-19337d609659" containerID="50bcf8228d7ea362a5e757cd344c10dd3d9c7face6a6cc8141f2de6ef3a9e651" exitCode=0 Jan 27 08:05:41 crc kubenswrapper[4787]: I0127 08:05:41.202599 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-pp6fl" event={"ID":"49a597ca-3bfc-4377-a49b-19337d609659","Type":"ContainerDied","Data":"50bcf8228d7ea362a5e757cd344c10dd3d9c7face6a6cc8141f2de6ef3a9e651"} Jan 27 08:05:41 crc kubenswrapper[4787]: I0127 08:05:41.218742 4787 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-vfl8q" podStartSLOduration=2.341479229 podStartE2EDuration="10.218718734s" podCreationTimestamp="2026-01-27 08:05:31 +0000 UTC" firstStartedPulling="2026-01-27 08:05:32.373827974 +0000 UTC 
m=+838.026183466" lastFinishedPulling="2026-01-27 08:05:40.251067479 +0000 UTC m=+845.903422971" observedRunningTime="2026-01-27 08:05:41.215512163 +0000 UTC m=+846.867867665" watchObservedRunningTime="2026-01-27 08:05:41.218718734 +0000 UTC m=+846.871074226" Jan 27 08:05:42 crc kubenswrapper[4787]: I0127 08:05:42.195159 4787 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/controller-6968d8fdc4-k24s7" Jan 27 08:05:42 crc kubenswrapper[4787]: I0127 08:05:42.214139 4787 generic.go:334] "Generic (PLEG): container finished" podID="49a597ca-3bfc-4377-a49b-19337d609659" containerID="f13de1b16a2b0ec1f03cb3fe91834ff469a9bc16e3421fb4da9faa6607c536f2" exitCode=0 Jan 27 08:05:42 crc kubenswrapper[4787]: I0127 08:05:42.215516 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-pp6fl" event={"ID":"49a597ca-3bfc-4377-a49b-19337d609659","Type":"ContainerDied","Data":"f13de1b16a2b0ec1f03cb3fe91834ff469a9bc16e3421fb4da9faa6607c536f2"} Jan 27 08:05:43 crc kubenswrapper[4787]: I0127 08:05:43.239256 4787 generic.go:334] "Generic (PLEG): container finished" podID="49a597ca-3bfc-4377-a49b-19337d609659" containerID="20d2ac42c475dddad531d4931d49dbc8383f5f9d2b460531af93bb3060b498d9" exitCode=0 Jan 27 08:05:43 crc kubenswrapper[4787]: I0127 08:05:43.239423 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-pp6fl" event={"ID":"49a597ca-3bfc-4377-a49b-19337d609659","Type":"ContainerDied","Data":"20d2ac42c475dddad531d4931d49dbc8383f5f9d2b460531af93bb3060b498d9"} Jan 27 08:05:44 crc kubenswrapper[4787]: I0127 08:05:44.263052 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-pp6fl" event={"ID":"49a597ca-3bfc-4377-a49b-19337d609659","Type":"ContainerStarted","Data":"194910e1faef0b7d8085d8f3869b5f4854c7ed83a27e8a96a3adc010d1d1ac16"} Jan 27 08:05:44 crc kubenswrapper[4787]: I0127 08:05:44.263624 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-pp6fl" event={"ID":"49a597ca-3bfc-4377-a49b-19337d609659","Type":"ContainerStarted","Data":"277912d65d006ba1d755ac834c7ece0e339da4e6e7d41f67f6e0706c4ece1313"} Jan 27 08:05:44 crc kubenswrapper[4787]: I0127 08:05:44.263641 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-pp6fl" event={"ID":"49a597ca-3bfc-4377-a49b-19337d609659","Type":"ContainerStarted","Data":"b3fd2aaee7e391d06dfe092d23d13e02cfb77ef9daeb71f37af3ec85b9fbb70e"} Jan 27 08:05:44 crc kubenswrapper[4787]: I0127 08:05:44.263656 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-pp6fl" event={"ID":"49a597ca-3bfc-4377-a49b-19337d609659","Type":"ContainerStarted","Data":"ffc45d88f8d956c5501fc0b89149767e48468c9e6b207d8762990fdc1405a9b0"} Jan 27 08:05:44 crc kubenswrapper[4787]: I0127 08:05:44.263670 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-pp6fl" event={"ID":"49a597ca-3bfc-4377-a49b-19337d609659","Type":"ContainerStarted","Data":"19da2fd3b4347ba1f6088ec73757bc008c1694a80386070ae63a61fcaeb58790"} Jan 27 08:05:45 crc kubenswrapper[4787]: I0127 08:05:45.278350 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-pp6fl" event={"ID":"49a597ca-3bfc-4377-a49b-19337d609659","Type":"ContainerStarted","Data":"7ddbbc9d072cf882e76c4feab1a22d3c2d6092306a47f0a04f3b3a03447b44cc"} Jan 27 08:05:45 crc kubenswrapper[4787]: I0127 08:05:45.278872 4787 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="metallb-system/frr-k8s-pp6fl" Jan 27 08:05:45 crc kubenswrapper[4787]: I0127 08:05:45.307738 4787 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/frr-k8s-pp6fl" podStartSLOduration=6.352553844 podStartE2EDuration="14.307710336s" podCreationTimestamp="2026-01-27 08:05:31 +0000 UTC" firstStartedPulling="2026-01-27 08:05:32.276833607 +0000 UTC m=+837.929189129" lastFinishedPulling="2026-01-27 08:05:40.231990119 +0000 UTC m=+845.884345621" observedRunningTime="2026-01-27 08:05:45.301469332 +0000 UTC m=+850.953824834" watchObservedRunningTime="2026-01-27 08:05:45.307710336 +0000 UTC m=+850.960065858" Jan 27 08:05:47 crc kubenswrapper[4787]: I0127 08:05:47.089623 4787 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="metallb-system/frr-k8s-pp6fl" Jan 27 08:05:47 crc kubenswrapper[4787]: I0127 08:05:47.124706 4787 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="metallb-system/frr-k8s-pp6fl" Jan 27 08:05:48 crc kubenswrapper[4787]: I0127 08:05:48.995265 4787 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-x2v88"] Jan 27 08:05:48 crc kubenswrapper[4787]: I0127 08:05:48.997139 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-x2v88" Jan 27 08:05:49 crc kubenswrapper[4787]: I0127 08:05:49.012343 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-x2v88"] Jan 27 08:05:49 crc kubenswrapper[4787]: I0127 08:05:49.052354 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/157309b8-f7e0-498b-ab3f-90f05cd34dfd-utilities\") pod \"community-operators-x2v88\" (UID: \"157309b8-f7e0-498b-ab3f-90f05cd34dfd\") " pod="openshift-marketplace/community-operators-x2v88" Jan 27 08:05:49 crc kubenswrapper[4787]: I0127 08:05:49.052678 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7g5xb\" (UniqueName: \"kubernetes.io/projected/157309b8-f7e0-498b-ab3f-90f05cd34dfd-kube-api-access-7g5xb\") pod \"community-operators-x2v88\" (UID: \"157309b8-f7e0-498b-ab3f-90f05cd34dfd\") " pod="openshift-marketplace/community-operators-x2v88" Jan 27 08:05:49 crc kubenswrapper[4787]: I0127 08:05:49.052846 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/157309b8-f7e0-498b-ab3f-90f05cd34dfd-catalog-content\") pod \"community-operators-x2v88\" (UID: \"157309b8-f7e0-498b-ab3f-90f05cd34dfd\") " pod="openshift-marketplace/community-operators-x2v88" Jan 27 08:05:49 crc kubenswrapper[4787]: I0127 08:05:49.153582 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/157309b8-f7e0-498b-ab3f-90f05cd34dfd-catalog-content\") pod \"community-operators-x2v88\" (UID: \"157309b8-f7e0-498b-ab3f-90f05cd34dfd\") " pod="openshift-marketplace/community-operators-x2v88" Jan 27 08:05:49 crc kubenswrapper[4787]: I0127 08:05:49.153647 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/157309b8-f7e0-498b-ab3f-90f05cd34dfd-utilities\") pod \"community-operators-x2v88\" (UID: \"157309b8-f7e0-498b-ab3f-90f05cd34dfd\") " 
pod="openshift-marketplace/community-operators-x2v88" Jan 27 08:05:49 crc kubenswrapper[4787]: I0127 08:05:49.153694 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7g5xb\" (UniqueName: \"kubernetes.io/projected/157309b8-f7e0-498b-ab3f-90f05cd34dfd-kube-api-access-7g5xb\") pod \"community-operators-x2v88\" (UID: \"157309b8-f7e0-498b-ab3f-90f05cd34dfd\") " pod="openshift-marketplace/community-operators-x2v88" Jan 27 08:05:49 crc kubenswrapper[4787]: I0127 08:05:49.154260 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/157309b8-f7e0-498b-ab3f-90f05cd34dfd-utilities\") pod \"community-operators-x2v88\" (UID: \"157309b8-f7e0-498b-ab3f-90f05cd34dfd\") " pod="openshift-marketplace/community-operators-x2v88" Jan 27 08:05:49 crc kubenswrapper[4787]: I0127 08:05:49.154260 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/157309b8-f7e0-498b-ab3f-90f05cd34dfd-catalog-content\") pod \"community-operators-x2v88\" (UID: \"157309b8-f7e0-498b-ab3f-90f05cd34dfd\") " pod="openshift-marketplace/community-operators-x2v88" Jan 27 08:05:49 crc kubenswrapper[4787]: I0127 08:05:49.172697 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7g5xb\" (UniqueName: \"kubernetes.io/projected/157309b8-f7e0-498b-ab3f-90f05cd34dfd-kube-api-access-7g5xb\") pod \"community-operators-x2v88\" (UID: \"157309b8-f7e0-498b-ab3f-90f05cd34dfd\") " pod="openshift-marketplace/community-operators-x2v88" Jan 27 08:05:49 crc kubenswrapper[4787]: I0127 08:05:49.326106 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-x2v88" Jan 27 08:05:49 crc kubenswrapper[4787]: I0127 08:05:49.830759 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-x2v88"] Jan 27 08:05:49 crc kubenswrapper[4787]: W0127 08:05:49.840422 4787 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod157309b8_f7e0_498b_ab3f_90f05cd34dfd.slice/crio-861fbb815ca629201014bdc0a87c1bdc00f076dcd52a0a4b5e5bbd54e56fb253 WatchSource:0}: Error finding container 861fbb815ca629201014bdc0a87c1bdc00f076dcd52a0a4b5e5bbd54e56fb253: Status 404 returned error can't find the container with id 861fbb815ca629201014bdc0a87c1bdc00f076dcd52a0a4b5e5bbd54e56fb253 Jan 27 08:05:50 crc kubenswrapper[4787]: I0127 08:05:50.315745 4787 generic.go:334] "Generic (PLEG): container finished" podID="157309b8-f7e0-498b-ab3f-90f05cd34dfd" containerID="783a62d9c4efde8e927f42db883dfc3694395b59698a43664bbaa75028b9fedb" exitCode=0 Jan 27 08:05:50 crc kubenswrapper[4787]: I0127 08:05:50.315880 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-x2v88" event={"ID":"157309b8-f7e0-498b-ab3f-90f05cd34dfd","Type":"ContainerDied","Data":"783a62d9c4efde8e927f42db883dfc3694395b59698a43664bbaa75028b9fedb"} Jan 27 08:05:50 crc kubenswrapper[4787]: I0127 08:05:50.317263 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-x2v88" event={"ID":"157309b8-f7e0-498b-ab3f-90f05cd34dfd","Type":"ContainerStarted","Data":"861fbb815ca629201014bdc0a87c1bdc00f076dcd52a0a4b5e5bbd54e56fb253"} Jan 27 08:05:51 crc kubenswrapper[4787]: I0127 08:05:51.327160 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/community-operators-x2v88" event={"ID":"157309b8-f7e0-498b-ab3f-90f05cd34dfd","Type":"ContainerStarted","Data":"d3c461dc67225f8ec7d2fd953bf5df3c4ba58404084ae0214331e82ea98e7c63"} Jan 27 08:05:52 crc kubenswrapper[4787]: I0127 08:05:52.096127 4787 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-vfl8q" Jan 27 08:05:52 crc kubenswrapper[4787]: I0127 08:05:52.335466 4787 generic.go:334] "Generic (PLEG): container finished" podID="157309b8-f7e0-498b-ab3f-90f05cd34dfd" containerID="d3c461dc67225f8ec7d2fd953bf5df3c4ba58404084ae0214331e82ea98e7c63" exitCode=0 Jan 27 08:05:52 crc kubenswrapper[4787]: I0127 08:05:52.335535 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-x2v88" event={"ID":"157309b8-f7e0-498b-ab3f-90f05cd34dfd","Type":"ContainerDied","Data":"d3c461dc67225f8ec7d2fd953bf5df3c4ba58404084ae0214331e82ea98e7c63"} Jan 27 08:05:52 crc kubenswrapper[4787]: I0127 08:05:52.823737 4787 patch_prober.go:28] interesting pod/machine-config-daemon-q4fh5 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 27 08:05:52 crc kubenswrapper[4787]: I0127 08:05:52.823820 4787 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-q4fh5" podUID="f051e184-acac-47cf-9e04-9df648288715" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 27 08:05:52 crc kubenswrapper[4787]: I0127 08:05:52.823874 4787 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-q4fh5" Jan 27 08:05:52 crc kubenswrapper[4787]: I0127 08:05:52.824643 4787 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"76c11a5da51cedd24f4d664015e61242e532ce42823eb519b69423acc27d454d"} pod="openshift-machine-config-operator/machine-config-daemon-q4fh5" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 27 08:05:52 crc kubenswrapper[4787]: I0127 08:05:52.824713 4787 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-q4fh5" podUID="f051e184-acac-47cf-9e04-9df648288715" containerName="machine-config-daemon" containerID="cri-o://76c11a5da51cedd24f4d664015e61242e532ce42823eb519b69423acc27d454d" gracePeriod=600 Jan 27 08:05:53 crc kubenswrapper[4787]: I0127 08:05:53.346371 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-x2v88" event={"ID":"157309b8-f7e0-498b-ab3f-90f05cd34dfd","Type":"ContainerStarted","Data":"9df41c8f928a5bb0401c44c1e75deb1e86678734d9c68d143045d3bb5b5ca180"} Jan 27 08:05:53 crc kubenswrapper[4787]: I0127 08:05:53.349291 4787 generic.go:334] "Generic (PLEG): container finished" podID="f051e184-acac-47cf-9e04-9df648288715" containerID="76c11a5da51cedd24f4d664015e61242e532ce42823eb519b69423acc27d454d" exitCode=0 Jan 27 08:05:53 crc kubenswrapper[4787]: I0127 08:05:53.349356 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-q4fh5" 
event={"ID":"f051e184-acac-47cf-9e04-9df648288715","Type":"ContainerDied","Data":"76c11a5da51cedd24f4d664015e61242e532ce42823eb519b69423acc27d454d"} Jan 27 08:05:53 crc kubenswrapper[4787]: I0127 08:05:53.349385 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-q4fh5" event={"ID":"f051e184-acac-47cf-9e04-9df648288715","Type":"ContainerStarted","Data":"8407f8fd138ff9e300a7e8488e02cc30a66d50ed89a7c4ef1d3339c94e2a22b5"} Jan 27 08:05:53 crc kubenswrapper[4787]: I0127 08:05:53.349408 4787 scope.go:117] "RemoveContainer" containerID="e4c1a6fd72ac92bada26c4a251440b894a7362ee02ad76996c3668ab0c3d7b09" Jan 27 08:05:53 crc kubenswrapper[4787]: I0127 08:05:53.375483 4787 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-x2v88" podStartSLOduration=2.89044263 podStartE2EDuration="5.37545487s" podCreationTimestamp="2026-01-27 08:05:48 +0000 UTC" firstStartedPulling="2026-01-27 08:05:50.319913908 +0000 UTC m=+855.972269410" lastFinishedPulling="2026-01-27 08:05:52.804926138 +0000 UTC m=+858.457281650" observedRunningTime="2026-01-27 08:05:53.366284108 +0000 UTC m=+859.018639610" watchObservedRunningTime="2026-01-27 08:05:53.37545487 +0000 UTC m=+859.027810372" Jan 27 08:05:53 crc kubenswrapper[4787]: I0127 08:05:53.969292 4787 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/speaker-t2fkx" Jan 27 08:05:55 crc kubenswrapper[4787]: I0127 08:05:55.610924 4787 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931av6xrq"] Jan 27 08:05:55 crc kubenswrapper[4787]: I0127 08:05:55.612650 4787 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931av6xrq" Jan 27 08:05:55 crc kubenswrapper[4787]: I0127 08:05:55.615156 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Jan 27 08:05:55 crc kubenswrapper[4787]: I0127 08:05:55.630881 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931av6xrq"] Jan 27 08:05:55 crc kubenswrapper[4787]: I0127 08:05:55.756601 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vpshb\" (UniqueName: \"kubernetes.io/projected/9ba6b24a-de23-4528-87fc-c2932e3beb6e-kube-api-access-vpshb\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931av6xrq\" (UID: \"9ba6b24a-de23-4528-87fc-c2932e3beb6e\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931av6xrq" Jan 27 08:05:55 crc kubenswrapper[4787]: I0127 08:05:55.756668 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/9ba6b24a-de23-4528-87fc-c2932e3beb6e-bundle\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931av6xrq\" (UID: \"9ba6b24a-de23-4528-87fc-c2932e3beb6e\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931av6xrq" Jan 27 08:05:55 crc kubenswrapper[4787]: I0127 08:05:55.756989 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/9ba6b24a-de23-4528-87fc-c2932e3beb6e-util\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931av6xrq\" (UID: \"9ba6b24a-de23-4528-87fc-c2932e3beb6e\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931av6xrq" Jan 27 08:05:55 crc kubenswrapper[4787]: I0127 08:05:55.858902 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vpshb\" (UniqueName: \"kubernetes.io/projected/9ba6b24a-de23-4528-87fc-c2932e3beb6e-kube-api-access-vpshb\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931av6xrq\" (UID: \"9ba6b24a-de23-4528-87fc-c2932e3beb6e\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931av6xrq" Jan 27 08:05:55 crc kubenswrapper[4787]: I0127 08:05:55.858974 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/9ba6b24a-de23-4528-87fc-c2932e3beb6e-bundle\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931av6xrq\" (UID: \"9ba6b24a-de23-4528-87fc-c2932e3beb6e\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931av6xrq" Jan 27 08:05:55 crc kubenswrapper[4787]: I0127 08:05:55.859040 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/9ba6b24a-de23-4528-87fc-c2932e3beb6e-util\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931av6xrq\" (UID: \"9ba6b24a-de23-4528-87fc-c2932e3beb6e\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931av6xrq" Jan 27 08:05:55 crc kubenswrapper[4787]: I0127 08:05:55.859695 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: 
\"kubernetes.io/empty-dir/9ba6b24a-de23-4528-87fc-c2932e3beb6e-util\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931av6xrq\" (UID: \"9ba6b24a-de23-4528-87fc-c2932e3beb6e\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931av6xrq" Jan 27 08:05:55 crc kubenswrapper[4787]: I0127 08:05:55.859808 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/9ba6b24a-de23-4528-87fc-c2932e3beb6e-bundle\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931av6xrq\" (UID: \"9ba6b24a-de23-4528-87fc-c2932e3beb6e\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931av6xrq" Jan 27 08:05:55 crc kubenswrapper[4787]: I0127 08:05:55.889457 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vpshb\" (UniqueName: \"kubernetes.io/projected/9ba6b24a-de23-4528-87fc-c2932e3beb6e-kube-api-access-vpshb\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931av6xrq\" (UID: \"9ba6b24a-de23-4528-87fc-c2932e3beb6e\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931av6xrq" Jan 27 08:05:55 crc kubenswrapper[4787]: I0127 08:05:55.933094 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931av6xrq" Jan 27 08:05:56 crc kubenswrapper[4787]: I0127 08:05:56.157119 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931av6xrq"] Jan 27 08:05:56 crc kubenswrapper[4787]: I0127 08:05:56.379460 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931av6xrq" event={"ID":"9ba6b24a-de23-4528-87fc-c2932e3beb6e","Type":"ContainerStarted","Data":"5f29ea04aece039d214830a95207446972a3ea9f53bf9cf1c752519753e9da5b"} Jan 27 08:05:56 crc kubenswrapper[4787]: I0127 08:05:56.379998 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931av6xrq" event={"ID":"9ba6b24a-de23-4528-87fc-c2932e3beb6e","Type":"ContainerStarted","Data":"10f9fd4a391efed59786dfdf11a4ceeac9813a3b731165bbc936f972ec892204"} Jan 27 08:05:57 crc kubenswrapper[4787]: I0127 08:05:57.388985 4787 generic.go:334] "Generic (PLEG): container finished" podID="9ba6b24a-de23-4528-87fc-c2932e3beb6e" containerID="5f29ea04aece039d214830a95207446972a3ea9f53bf9cf1c752519753e9da5b" exitCode=0 Jan 27 08:05:57 crc kubenswrapper[4787]: I0127 08:05:57.389073 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931av6xrq" event={"ID":"9ba6b24a-de23-4528-87fc-c2932e3beb6e","Type":"ContainerDied","Data":"5f29ea04aece039d214830a95207446972a3ea9f53bf9cf1c752519753e9da5b"} Jan 27 08:05:59 crc kubenswrapper[4787]: I0127 08:05:59.326250 4787 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-x2v88" Jan 27 08:05:59 crc kubenswrapper[4787]: I0127 08:05:59.327366 4787 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-x2v88" Jan 27 08:05:59 crc kubenswrapper[4787]: I0127 08:05:59.388109 4787 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" 
pod="openshift-marketplace/community-operators-x2v88" Jan 27 08:05:59 crc kubenswrapper[4787]: I0127 08:05:59.442293 4787 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-x2v88" Jan 27 08:06:01 crc kubenswrapper[4787]: I0127 08:06:01.759839 4787 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-x2v88"] Jan 27 08:06:02 crc kubenswrapper[4787]: I0127 08:06:02.079455 4787 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/frr-k8s-pp6fl" Jan 27 08:06:02 crc kubenswrapper[4787]: I0127 08:06:02.426772 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931av6xrq" event={"ID":"9ba6b24a-de23-4528-87fc-c2932e3beb6e","Type":"ContainerStarted","Data":"07d22ab0377a276f80adc6d2b854c48196d36474e43eac44056f1d9c8569fcff"} Jan 27 08:06:02 crc kubenswrapper[4787]: I0127 08:06:02.426934 4787 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-x2v88" podUID="157309b8-f7e0-498b-ab3f-90f05cd34dfd" containerName="registry-server" containerID="cri-o://9df41c8f928a5bb0401c44c1e75deb1e86678734d9c68d143045d3bb5b5ca180" gracePeriod=2 Jan 27 08:06:03 crc kubenswrapper[4787]: I0127 08:06:03.436365 4787 generic.go:334] "Generic (PLEG): container finished" podID="9ba6b24a-de23-4528-87fc-c2932e3beb6e" containerID="07d22ab0377a276f80adc6d2b854c48196d36474e43eac44056f1d9c8569fcff" exitCode=0 Jan 27 08:06:03 crc kubenswrapper[4787]: I0127 08:06:03.436442 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931av6xrq" event={"ID":"9ba6b24a-de23-4528-87fc-c2932e3beb6e","Type":"ContainerDied","Data":"07d22ab0377a276f80adc6d2b854c48196d36474e43eac44056f1d9c8569fcff"} Jan 27 08:06:03 crc kubenswrapper[4787]: I0127 08:06:03.447655 4787 generic.go:334] "Generic (PLEG): container finished" podID="157309b8-f7e0-498b-ab3f-90f05cd34dfd" containerID="9df41c8f928a5bb0401c44c1e75deb1e86678734d9c68d143045d3bb5b5ca180" exitCode=0 Jan 27 08:06:03 crc kubenswrapper[4787]: I0127 08:06:03.447718 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-x2v88" event={"ID":"157309b8-f7e0-498b-ab3f-90f05cd34dfd","Type":"ContainerDied","Data":"9df41c8f928a5bb0401c44c1e75deb1e86678734d9c68d143045d3bb5b5ca180"} Jan 27 08:06:04 crc kubenswrapper[4787]: I0127 08:06:04.327471 4787 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-x2v88" Jan 27 08:06:04 crc kubenswrapper[4787]: I0127 08:06:04.458913 4787 generic.go:334] "Generic (PLEG): container finished" podID="9ba6b24a-de23-4528-87fc-c2932e3beb6e" containerID="cefff68a6e6276878982ab6078637ced540e5b2f060fdec7c81331b0d7a9c76b" exitCode=0 Jan 27 08:06:04 crc kubenswrapper[4787]: I0127 08:06:04.459010 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931av6xrq" event={"ID":"9ba6b24a-de23-4528-87fc-c2932e3beb6e","Type":"ContainerDied","Data":"cefff68a6e6276878982ab6078637ced540e5b2f060fdec7c81331b0d7a9c76b"} Jan 27 08:06:04 crc kubenswrapper[4787]: I0127 08:06:04.462143 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-x2v88" event={"ID":"157309b8-f7e0-498b-ab3f-90f05cd34dfd","Type":"ContainerDied","Data":"861fbb815ca629201014bdc0a87c1bdc00f076dcd52a0a4b5e5bbd54e56fb253"} Jan 27 08:06:04 crc kubenswrapper[4787]: I0127 08:06:04.462346 4787 scope.go:117] "RemoveContainer" containerID="9df41c8f928a5bb0401c44c1e75deb1e86678734d9c68d143045d3bb5b5ca180" Jan 27 08:06:04 crc kubenswrapper[4787]: I0127 08:06:04.462214 4787 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-x2v88" Jan 27 08:06:04 crc kubenswrapper[4787]: I0127 08:06:04.496025 4787 scope.go:117] "RemoveContainer" containerID="d3c461dc67225f8ec7d2fd953bf5df3c4ba58404084ae0214331e82ea98e7c63" Jan 27 08:06:04 crc kubenswrapper[4787]: I0127 08:06:04.513909 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/157309b8-f7e0-498b-ab3f-90f05cd34dfd-utilities\") pod \"157309b8-f7e0-498b-ab3f-90f05cd34dfd\" (UID: \"157309b8-f7e0-498b-ab3f-90f05cd34dfd\") " Jan 27 08:06:04 crc kubenswrapper[4787]: I0127 08:06:04.514114 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/157309b8-f7e0-498b-ab3f-90f05cd34dfd-catalog-content\") pod \"157309b8-f7e0-498b-ab3f-90f05cd34dfd\" (UID: \"157309b8-f7e0-498b-ab3f-90f05cd34dfd\") " Jan 27 08:06:04 crc kubenswrapper[4787]: I0127 08:06:04.514174 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7g5xb\" (UniqueName: \"kubernetes.io/projected/157309b8-f7e0-498b-ab3f-90f05cd34dfd-kube-api-access-7g5xb\") pod \"157309b8-f7e0-498b-ab3f-90f05cd34dfd\" (UID: \"157309b8-f7e0-498b-ab3f-90f05cd34dfd\") " Jan 27 08:06:04 crc kubenswrapper[4787]: I0127 08:06:04.515107 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/157309b8-f7e0-498b-ab3f-90f05cd34dfd-utilities" (OuterVolumeSpecName: "utilities") pod "157309b8-f7e0-498b-ab3f-90f05cd34dfd" (UID: "157309b8-f7e0-498b-ab3f-90f05cd34dfd"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 27 08:06:04 crc kubenswrapper[4787]: I0127 08:06:04.521537 4787 scope.go:117] "RemoveContainer" containerID="783a62d9c4efde8e927f42db883dfc3694395b59698a43664bbaa75028b9fedb" Jan 27 08:06:04 crc kubenswrapper[4787]: I0127 08:06:04.521860 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/157309b8-f7e0-498b-ab3f-90f05cd34dfd-kube-api-access-7g5xb" (OuterVolumeSpecName: "kube-api-access-7g5xb") pod "157309b8-f7e0-498b-ab3f-90f05cd34dfd" (UID: "157309b8-f7e0-498b-ab3f-90f05cd34dfd"). InnerVolumeSpecName "kube-api-access-7g5xb". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 27 08:06:04 crc kubenswrapper[4787]: I0127 08:06:04.604379 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/157309b8-f7e0-498b-ab3f-90f05cd34dfd-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "157309b8-f7e0-498b-ab3f-90f05cd34dfd" (UID: "157309b8-f7e0-498b-ab3f-90f05cd34dfd"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 27 08:06:04 crc kubenswrapper[4787]: I0127 08:06:04.615925 4787 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/157309b8-f7e0-498b-ab3f-90f05cd34dfd-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 27 08:06:04 crc kubenswrapper[4787]: I0127 08:06:04.615955 4787 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7g5xb\" (UniqueName: \"kubernetes.io/projected/157309b8-f7e0-498b-ab3f-90f05cd34dfd-kube-api-access-7g5xb\") on node \"crc\" DevicePath \"\"" Jan 27 08:06:04 crc kubenswrapper[4787]: I0127 08:06:04.615968 4787 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/157309b8-f7e0-498b-ab3f-90f05cd34dfd-utilities\") on node \"crc\" DevicePath \"\"" Jan 27 08:06:04 crc kubenswrapper[4787]: I0127 08:06:04.802081 4787 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-x2v88"] Jan 27 08:06:04 crc kubenswrapper[4787]: I0127 08:06:04.807643 4787 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-x2v88"] Jan 27 08:06:05 crc kubenswrapper[4787]: I0127 08:06:05.086881 4787 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="157309b8-f7e0-498b-ab3f-90f05cd34dfd" path="/var/lib/kubelet/pods/157309b8-f7e0-498b-ab3f-90f05cd34dfd/volumes" Jan 27 08:06:05 crc kubenswrapper[4787]: I0127 08:06:05.789467 4787 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931av6xrq" Jan 27 08:06:05 crc kubenswrapper[4787]: I0127 08:06:05.837202 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/9ba6b24a-de23-4528-87fc-c2932e3beb6e-util\") pod \"9ba6b24a-de23-4528-87fc-c2932e3beb6e\" (UID: \"9ba6b24a-de23-4528-87fc-c2932e3beb6e\") " Jan 27 08:06:05 crc kubenswrapper[4787]: I0127 08:06:05.837332 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/9ba6b24a-de23-4528-87fc-c2932e3beb6e-bundle\") pod \"9ba6b24a-de23-4528-87fc-c2932e3beb6e\" (UID: \"9ba6b24a-de23-4528-87fc-c2932e3beb6e\") " Jan 27 08:06:05 crc kubenswrapper[4787]: I0127 08:06:05.837383 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vpshb\" (UniqueName: \"kubernetes.io/projected/9ba6b24a-de23-4528-87fc-c2932e3beb6e-kube-api-access-vpshb\") pod \"9ba6b24a-de23-4528-87fc-c2932e3beb6e\" (UID: \"9ba6b24a-de23-4528-87fc-c2932e3beb6e\") " Jan 27 08:06:05 crc kubenswrapper[4787]: I0127 08:06:05.839071 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9ba6b24a-de23-4528-87fc-c2932e3beb6e-bundle" (OuterVolumeSpecName: "bundle") pod "9ba6b24a-de23-4528-87fc-c2932e3beb6e" (UID: "9ba6b24a-de23-4528-87fc-c2932e3beb6e"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 27 08:06:05 crc kubenswrapper[4787]: I0127 08:06:05.845773 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9ba6b24a-de23-4528-87fc-c2932e3beb6e-kube-api-access-vpshb" (OuterVolumeSpecName: "kube-api-access-vpshb") pod "9ba6b24a-de23-4528-87fc-c2932e3beb6e" (UID: "9ba6b24a-de23-4528-87fc-c2932e3beb6e"). InnerVolumeSpecName "kube-api-access-vpshb". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 27 08:06:05 crc kubenswrapper[4787]: I0127 08:06:05.863091 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9ba6b24a-de23-4528-87fc-c2932e3beb6e-util" (OuterVolumeSpecName: "util") pod "9ba6b24a-de23-4528-87fc-c2932e3beb6e" (UID: "9ba6b24a-de23-4528-87fc-c2932e3beb6e"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 27 08:06:05 crc kubenswrapper[4787]: I0127 08:06:05.938437 4787 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/9ba6b24a-de23-4528-87fc-c2932e3beb6e-util\") on node \"crc\" DevicePath \"\"" Jan 27 08:06:05 crc kubenswrapper[4787]: I0127 08:06:05.938475 4787 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/9ba6b24a-de23-4528-87fc-c2932e3beb6e-bundle\") on node \"crc\" DevicePath \"\"" Jan 27 08:06:05 crc kubenswrapper[4787]: I0127 08:06:05.938487 4787 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vpshb\" (UniqueName: \"kubernetes.io/projected/9ba6b24a-de23-4528-87fc-c2932e3beb6e-kube-api-access-vpshb\") on node \"crc\" DevicePath \"\"" Jan 27 08:06:06 crc kubenswrapper[4787]: I0127 08:06:06.489540 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931av6xrq" event={"ID":"9ba6b24a-de23-4528-87fc-c2932e3beb6e","Type":"ContainerDied","Data":"10f9fd4a391efed59786dfdf11a4ceeac9813a3b731165bbc936f972ec892204"} Jan 27 08:06:06 crc kubenswrapper[4787]: I0127 08:06:06.489752 4787 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="10f9fd4a391efed59786dfdf11a4ceeac9813a3b731165bbc936f972ec892204" Jan 27 08:06:06 crc kubenswrapper[4787]: I0127 08:06:06.489675 4787 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931av6xrq" Jan 27 08:06:08 crc kubenswrapper[4787]: I0127 08:06:08.393053 4787 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-mxzbt"] Jan 27 08:06:08 crc kubenswrapper[4787]: E0127 08:06:08.393757 4787 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9ba6b24a-de23-4528-87fc-c2932e3beb6e" containerName="pull" Jan 27 08:06:08 crc kubenswrapper[4787]: I0127 08:06:08.393771 4787 state_mem.go:107] "Deleted CPUSet assignment" podUID="9ba6b24a-de23-4528-87fc-c2932e3beb6e" containerName="pull" Jan 27 08:06:08 crc kubenswrapper[4787]: E0127 08:06:08.393786 4787 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="157309b8-f7e0-498b-ab3f-90f05cd34dfd" containerName="registry-server" Jan 27 08:06:08 crc kubenswrapper[4787]: I0127 08:06:08.393793 4787 state_mem.go:107] "Deleted CPUSet assignment" podUID="157309b8-f7e0-498b-ab3f-90f05cd34dfd" containerName="registry-server" Jan 27 08:06:08 crc kubenswrapper[4787]: E0127 08:06:08.393802 4787 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9ba6b24a-de23-4528-87fc-c2932e3beb6e" containerName="util" Jan 27 08:06:08 crc kubenswrapper[4787]: I0127 08:06:08.393809 4787 state_mem.go:107] "Deleted CPUSet assignment" podUID="9ba6b24a-de23-4528-87fc-c2932e3beb6e" containerName="util" Jan 27 08:06:08 crc kubenswrapper[4787]: E0127 08:06:08.393825 4787 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9ba6b24a-de23-4528-87fc-c2932e3beb6e" containerName="extract" Jan 27 08:06:08 crc kubenswrapper[4787]: I0127 08:06:08.393831 4787 state_mem.go:107] "Deleted CPUSet assignment" podUID="9ba6b24a-de23-4528-87fc-c2932e3beb6e" containerName="extract" Jan 27 08:06:08 crc kubenswrapper[4787]: E0127 08:06:08.393849 4787 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="157309b8-f7e0-498b-ab3f-90f05cd34dfd" 
containerName="extract-utilities" Jan 27 08:06:08 crc kubenswrapper[4787]: I0127 08:06:08.393855 4787 state_mem.go:107] "Deleted CPUSet assignment" podUID="157309b8-f7e0-498b-ab3f-90f05cd34dfd" containerName="extract-utilities" Jan 27 08:06:08 crc kubenswrapper[4787]: E0127 08:06:08.393865 4787 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="157309b8-f7e0-498b-ab3f-90f05cd34dfd" containerName="extract-content" Jan 27 08:06:08 crc kubenswrapper[4787]: I0127 08:06:08.393871 4787 state_mem.go:107] "Deleted CPUSet assignment" podUID="157309b8-f7e0-498b-ab3f-90f05cd34dfd" containerName="extract-content" Jan 27 08:06:08 crc kubenswrapper[4787]: I0127 08:06:08.393985 4787 memory_manager.go:354] "RemoveStaleState removing state" podUID="9ba6b24a-de23-4528-87fc-c2932e3beb6e" containerName="extract" Jan 27 08:06:08 crc kubenswrapper[4787]: I0127 08:06:08.394001 4787 memory_manager.go:354] "RemoveStaleState removing state" podUID="157309b8-f7e0-498b-ab3f-90f05cd34dfd" containerName="registry-server" Jan 27 08:06:08 crc kubenswrapper[4787]: I0127 08:06:08.394441 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-mxzbt" Jan 27 08:06:08 crc kubenswrapper[4787]: I0127 08:06:08.396772 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager-operator"/"openshift-service-ca.crt" Jan 27 08:06:08 crc kubenswrapper[4787]: I0127 08:06:08.396948 4787 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager-operator"/"cert-manager-operator-controller-manager-dockercfg-q2bbt" Jan 27 08:06:08 crc kubenswrapper[4787]: I0127 08:06:08.397825 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager-operator"/"kube-root-ca.crt" Jan 27 08:06:08 crc kubenswrapper[4787]: I0127 08:06:08.450607 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-mxzbt"] Jan 27 08:06:08 crc kubenswrapper[4787]: I0127 08:06:08.472769 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mm7fr\" (UniqueName: \"kubernetes.io/projected/1f790347-7bae-4cc6-8b0b-956ecc8b0a41-kube-api-access-mm7fr\") pod \"cert-manager-operator-controller-manager-64cf6dff88-mxzbt\" (UID: \"1f790347-7bae-4cc6-8b0b-956ecc8b0a41\") " pod="cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-mxzbt" Jan 27 08:06:08 crc kubenswrapper[4787]: I0127 08:06:08.472847 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tmp\" (UniqueName: \"kubernetes.io/empty-dir/1f790347-7bae-4cc6-8b0b-956ecc8b0a41-tmp\") pod \"cert-manager-operator-controller-manager-64cf6dff88-mxzbt\" (UID: \"1f790347-7bae-4cc6-8b0b-956ecc8b0a41\") " pod="cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-mxzbt" Jan 27 08:06:08 crc kubenswrapper[4787]: I0127 08:06:08.574394 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mm7fr\" (UniqueName: \"kubernetes.io/projected/1f790347-7bae-4cc6-8b0b-956ecc8b0a41-kube-api-access-mm7fr\") pod \"cert-manager-operator-controller-manager-64cf6dff88-mxzbt\" (UID: \"1f790347-7bae-4cc6-8b0b-956ecc8b0a41\") " pod="cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-mxzbt" Jan 27 08:06:08 crc kubenswrapper[4787]: I0127 08:06:08.574462 4787 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"tmp\" (UniqueName: \"kubernetes.io/empty-dir/1f790347-7bae-4cc6-8b0b-956ecc8b0a41-tmp\") pod \"cert-manager-operator-controller-manager-64cf6dff88-mxzbt\" (UID: \"1f790347-7bae-4cc6-8b0b-956ecc8b0a41\") " pod="cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-mxzbt" Jan 27 08:06:08 crc kubenswrapper[4787]: I0127 08:06:08.574985 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tmp\" (UniqueName: \"kubernetes.io/empty-dir/1f790347-7bae-4cc6-8b0b-956ecc8b0a41-tmp\") pod \"cert-manager-operator-controller-manager-64cf6dff88-mxzbt\" (UID: \"1f790347-7bae-4cc6-8b0b-956ecc8b0a41\") " pod="cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-mxzbt" Jan 27 08:06:08 crc kubenswrapper[4787]: I0127 08:06:08.597708 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mm7fr\" (UniqueName: \"kubernetes.io/projected/1f790347-7bae-4cc6-8b0b-956ecc8b0a41-kube-api-access-mm7fr\") pod \"cert-manager-operator-controller-manager-64cf6dff88-mxzbt\" (UID: \"1f790347-7bae-4cc6-8b0b-956ecc8b0a41\") " pod="cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-mxzbt" Jan 27 08:06:08 crc kubenswrapper[4787]: I0127 08:06:08.711770 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-mxzbt" Jan 27 08:06:09 crc kubenswrapper[4787]: I0127 08:06:09.041980 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-mxzbt"] Jan 27 08:06:09 crc kubenswrapper[4787]: I0127 08:06:09.513683 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-mxzbt" event={"ID":"1f790347-7bae-4cc6-8b0b-956ecc8b0a41","Type":"ContainerStarted","Data":"fa8269368f4ad340379bce26698e26209d945ebc33bf412edd182480b6803ef6"} Jan 27 08:06:16 crc kubenswrapper[4787]: I0127 08:06:16.567947 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-mxzbt" event={"ID":"1f790347-7bae-4cc6-8b0b-956ecc8b0a41","Type":"ContainerStarted","Data":"1e76dcb6d00d74c3f639e57d6437a647e90713da3c239d31bb4e0370ef6f94f3"} Jan 27 08:06:16 crc kubenswrapper[4787]: I0127 08:06:16.590883 4787 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-mxzbt" podStartSLOduration=1.2664676 podStartE2EDuration="8.59086089s" podCreationTimestamp="2026-01-27 08:06:08 +0000 UTC" firstStartedPulling="2026-01-27 08:06:09.051688252 +0000 UTC m=+874.704043744" lastFinishedPulling="2026-01-27 08:06:16.376081502 +0000 UTC m=+882.028437034" observedRunningTime="2026-01-27 08:06:16.587424031 +0000 UTC m=+882.239779533" watchObservedRunningTime="2026-01-27 08:06:16.59086089 +0000 UTC m=+882.243216392" Jan 27 08:06:22 crc kubenswrapper[4787]: I0127 08:06:22.987930 4787 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-webhook-f4fb5df64-lfhld"] Jan 27 08:06:22 crc kubenswrapper[4787]: I0127 08:06:22.989737 4787 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="cert-manager/cert-manager-webhook-f4fb5df64-lfhld" Jan 27 08:06:22 crc kubenswrapper[4787]: I0127 08:06:22.993067 4787 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-webhook-dockercfg-dd48q" Jan 27 08:06:22 crc kubenswrapper[4787]: I0127 08:06:22.993098 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"kube-root-ca.crt" Jan 27 08:06:22 crc kubenswrapper[4787]: I0127 08:06:22.993075 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"openshift-service-ca.crt" Jan 27 08:06:23 crc kubenswrapper[4787]: I0127 08:06:23.001313 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-webhook-f4fb5df64-lfhld"] Jan 27 08:06:23 crc kubenswrapper[4787]: I0127 08:06:23.108477 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/aff129cd-50c9-4f2a-b70d-d2066197a73b-bound-sa-token\") pod \"cert-manager-webhook-f4fb5df64-lfhld\" (UID: \"aff129cd-50c9-4f2a-b70d-d2066197a73b\") " pod="cert-manager/cert-manager-webhook-f4fb5df64-lfhld" Jan 27 08:06:23 crc kubenswrapper[4787]: I0127 08:06:23.108596 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6m7cf\" (UniqueName: \"kubernetes.io/projected/aff129cd-50c9-4f2a-b70d-d2066197a73b-kube-api-access-6m7cf\") pod \"cert-manager-webhook-f4fb5df64-lfhld\" (UID: \"aff129cd-50c9-4f2a-b70d-d2066197a73b\") " pod="cert-manager/cert-manager-webhook-f4fb5df64-lfhld" Jan 27 08:06:23 crc kubenswrapper[4787]: I0127 08:06:23.210406 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/aff129cd-50c9-4f2a-b70d-d2066197a73b-bound-sa-token\") pod \"cert-manager-webhook-f4fb5df64-lfhld\" (UID: \"aff129cd-50c9-4f2a-b70d-d2066197a73b\") " pod="cert-manager/cert-manager-webhook-f4fb5df64-lfhld" Jan 27 08:06:23 crc kubenswrapper[4787]: I0127 08:06:23.210500 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6m7cf\" (UniqueName: \"kubernetes.io/projected/aff129cd-50c9-4f2a-b70d-d2066197a73b-kube-api-access-6m7cf\") pod \"cert-manager-webhook-f4fb5df64-lfhld\" (UID: \"aff129cd-50c9-4f2a-b70d-d2066197a73b\") " pod="cert-manager/cert-manager-webhook-f4fb5df64-lfhld" Jan 27 08:06:23 crc kubenswrapper[4787]: I0127 08:06:23.234881 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6m7cf\" (UniqueName: \"kubernetes.io/projected/aff129cd-50c9-4f2a-b70d-d2066197a73b-kube-api-access-6m7cf\") pod \"cert-manager-webhook-f4fb5df64-lfhld\" (UID: \"aff129cd-50c9-4f2a-b70d-d2066197a73b\") " pod="cert-manager/cert-manager-webhook-f4fb5df64-lfhld" Jan 27 08:06:23 crc kubenswrapper[4787]: I0127 08:06:23.237698 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/aff129cd-50c9-4f2a-b70d-d2066197a73b-bound-sa-token\") pod \"cert-manager-webhook-f4fb5df64-lfhld\" (UID: \"aff129cd-50c9-4f2a-b70d-d2066197a73b\") " pod="cert-manager/cert-manager-webhook-f4fb5df64-lfhld" Jan 27 08:06:23 crc kubenswrapper[4787]: I0127 08:06:23.308313 4787 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="cert-manager/cert-manager-webhook-f4fb5df64-lfhld" Jan 27 08:06:23 crc kubenswrapper[4787]: I0127 08:06:23.432928 4787 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-cainjector-855d9ccff4-k4x6h"] Jan 27 08:06:23 crc kubenswrapper[4787]: I0127 08:06:23.438701 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-cainjector-855d9ccff4-k4x6h" Jan 27 08:06:23 crc kubenswrapper[4787]: I0127 08:06:23.441892 4787 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-cainjector-dockercfg-6zlbf" Jan 27 08:06:23 crc kubenswrapper[4787]: I0127 08:06:23.458142 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-cainjector-855d9ccff4-k4x6h"] Jan 27 08:06:23 crc kubenswrapper[4787]: I0127 08:06:23.518283 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/56feb43c-1e68-4770-94c0-e22943ca4313-bound-sa-token\") pod \"cert-manager-cainjector-855d9ccff4-k4x6h\" (UID: \"56feb43c-1e68-4770-94c0-e22943ca4313\") " pod="cert-manager/cert-manager-cainjector-855d9ccff4-k4x6h" Jan 27 08:06:23 crc kubenswrapper[4787]: I0127 08:06:23.518632 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hvvnp\" (UniqueName: \"kubernetes.io/projected/56feb43c-1e68-4770-94c0-e22943ca4313-kube-api-access-hvvnp\") pod \"cert-manager-cainjector-855d9ccff4-k4x6h\" (UID: \"56feb43c-1e68-4770-94c0-e22943ca4313\") " pod="cert-manager/cert-manager-cainjector-855d9ccff4-k4x6h" Jan 27 08:06:23 crc kubenswrapper[4787]: I0127 08:06:23.620050 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/56feb43c-1e68-4770-94c0-e22943ca4313-bound-sa-token\") pod \"cert-manager-cainjector-855d9ccff4-k4x6h\" (UID: \"56feb43c-1e68-4770-94c0-e22943ca4313\") " pod="cert-manager/cert-manager-cainjector-855d9ccff4-k4x6h" Jan 27 08:06:23 crc kubenswrapper[4787]: I0127 08:06:23.620161 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hvvnp\" (UniqueName: \"kubernetes.io/projected/56feb43c-1e68-4770-94c0-e22943ca4313-kube-api-access-hvvnp\") pod \"cert-manager-cainjector-855d9ccff4-k4x6h\" (UID: \"56feb43c-1e68-4770-94c0-e22943ca4313\") " pod="cert-manager/cert-manager-cainjector-855d9ccff4-k4x6h" Jan 27 08:06:23 crc kubenswrapper[4787]: I0127 08:06:23.642111 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hvvnp\" (UniqueName: \"kubernetes.io/projected/56feb43c-1e68-4770-94c0-e22943ca4313-kube-api-access-hvvnp\") pod \"cert-manager-cainjector-855d9ccff4-k4x6h\" (UID: \"56feb43c-1e68-4770-94c0-e22943ca4313\") " pod="cert-manager/cert-manager-cainjector-855d9ccff4-k4x6h" Jan 27 08:06:23 crc kubenswrapper[4787]: I0127 08:06:23.642352 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/56feb43c-1e68-4770-94c0-e22943ca4313-bound-sa-token\") pod \"cert-manager-cainjector-855d9ccff4-k4x6h\" (UID: \"56feb43c-1e68-4770-94c0-e22943ca4313\") " pod="cert-manager/cert-manager-cainjector-855d9ccff4-k4x6h" Jan 27 08:06:23 crc kubenswrapper[4787]: I0127 08:06:23.742626 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" 
pods=["cert-manager/cert-manager-webhook-f4fb5df64-lfhld"] Jan 27 08:06:23 crc kubenswrapper[4787]: W0127 08:06:23.754811 4787 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podaff129cd_50c9_4f2a_b70d_d2066197a73b.slice/crio-9cfc1afd1a852cd3af2bc57061b7546cceb15f0a01c2cdf0d4da91f0e44d3571 WatchSource:0}: Error finding container 9cfc1afd1a852cd3af2bc57061b7546cceb15f0a01c2cdf0d4da91f0e44d3571: Status 404 returned error can't find the container with id 9cfc1afd1a852cd3af2bc57061b7546cceb15f0a01c2cdf0d4da91f0e44d3571 Jan 27 08:06:23 crc kubenswrapper[4787]: I0127 08:06:23.781119 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-cainjector-855d9ccff4-k4x6h" Jan 27 08:06:24 crc kubenswrapper[4787]: I0127 08:06:24.037730 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-cainjector-855d9ccff4-k4x6h"] Jan 27 08:06:24 crc kubenswrapper[4787]: I0127 08:06:24.625589 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-855d9ccff4-k4x6h" event={"ID":"56feb43c-1e68-4770-94c0-e22943ca4313","Type":"ContainerStarted","Data":"ddb2971ae2abb1847af8ff7f3fe30349115a7e84994b616b0b5c5987592c4701"} Jan 27 08:06:24 crc kubenswrapper[4787]: I0127 08:06:24.626660 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-webhook-f4fb5df64-lfhld" event={"ID":"aff129cd-50c9-4f2a-b70d-d2066197a73b","Type":"ContainerStarted","Data":"9cfc1afd1a852cd3af2bc57061b7546cceb15f0a01c2cdf0d4da91f0e44d3571"} Jan 27 08:06:34 crc kubenswrapper[4787]: I0127 08:06:34.708980 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-855d9ccff4-k4x6h" event={"ID":"56feb43c-1e68-4770-94c0-e22943ca4313","Type":"ContainerStarted","Data":"d76093a0255b2cc26e714d8e067b83d63d070ebc6aadfc15e7d53c5195da95a1"} Jan 27 08:06:34 crc kubenswrapper[4787]: I0127 08:06:34.711980 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-webhook-f4fb5df64-lfhld" event={"ID":"aff129cd-50c9-4f2a-b70d-d2066197a73b","Type":"ContainerStarted","Data":"6a8147ec7b40a4cf0374c7e4a011f52b62883274ae45bca8372a496e00b7628a"} Jan 27 08:06:34 crc kubenswrapper[4787]: I0127 08:06:34.712168 4787 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="cert-manager/cert-manager-webhook-f4fb5df64-lfhld" Jan 27 08:06:34 crc kubenswrapper[4787]: I0127 08:06:34.735011 4787 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-cainjector-855d9ccff4-k4x6h" podStartSLOduration=1.278180528 podStartE2EDuration="11.734968361s" podCreationTimestamp="2026-01-27 08:06:23 +0000 UTC" firstStartedPulling="2026-01-27 08:06:24.041405752 +0000 UTC m=+889.693761244" lastFinishedPulling="2026-01-27 08:06:34.498193585 +0000 UTC m=+900.150549077" observedRunningTime="2026-01-27 08:06:34.731527562 +0000 UTC m=+900.383883074" watchObservedRunningTime="2026-01-27 08:06:34.734968361 +0000 UTC m=+900.387323853" Jan 27 08:06:36 crc kubenswrapper[4787]: I0127 08:06:36.072415 4787 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-webhook-f4fb5df64-lfhld" podStartSLOduration=3.29412206 podStartE2EDuration="14.072386776s" podCreationTimestamp="2026-01-27 08:06:22 +0000 UTC" firstStartedPulling="2026-01-27 08:06:23.758736502 +0000 UTC m=+889.411091994" lastFinishedPulling="2026-01-27 
08:06:34.537001218 +0000 UTC m=+900.189356710" observedRunningTime="2026-01-27 08:06:34.763407807 +0000 UTC m=+900.415763299" watchObservedRunningTime="2026-01-27 08:06:36.072386776 +0000 UTC m=+901.724742288" Jan 27 08:06:36 crc kubenswrapper[4787]: I0127 08:06:36.078669 4787 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-5nrrg"] Jan 27 08:06:36 crc kubenswrapper[4787]: I0127 08:06:36.080705 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-5nrrg" Jan 27 08:06:36 crc kubenswrapper[4787]: I0127 08:06:36.093010 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-5nrrg"] Jan 27 08:06:36 crc kubenswrapper[4787]: I0127 08:06:36.231540 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mb2tf\" (UniqueName: \"kubernetes.io/projected/e78c8f6a-3cd2-4fbc-af78-3fbaba41070c-kube-api-access-mb2tf\") pod \"certified-operators-5nrrg\" (UID: \"e78c8f6a-3cd2-4fbc-af78-3fbaba41070c\") " pod="openshift-marketplace/certified-operators-5nrrg" Jan 27 08:06:36 crc kubenswrapper[4787]: I0127 08:06:36.231956 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e78c8f6a-3cd2-4fbc-af78-3fbaba41070c-utilities\") pod \"certified-operators-5nrrg\" (UID: \"e78c8f6a-3cd2-4fbc-af78-3fbaba41070c\") " pod="openshift-marketplace/certified-operators-5nrrg" Jan 27 08:06:36 crc kubenswrapper[4787]: I0127 08:06:36.232071 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e78c8f6a-3cd2-4fbc-af78-3fbaba41070c-catalog-content\") pod \"certified-operators-5nrrg\" (UID: \"e78c8f6a-3cd2-4fbc-af78-3fbaba41070c\") " pod="openshift-marketplace/certified-operators-5nrrg" Jan 27 08:06:36 crc kubenswrapper[4787]: I0127 08:06:36.334212 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e78c8f6a-3cd2-4fbc-af78-3fbaba41070c-utilities\") pod \"certified-operators-5nrrg\" (UID: \"e78c8f6a-3cd2-4fbc-af78-3fbaba41070c\") " pod="openshift-marketplace/certified-operators-5nrrg" Jan 27 08:06:36 crc kubenswrapper[4787]: I0127 08:06:36.334339 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e78c8f6a-3cd2-4fbc-af78-3fbaba41070c-catalog-content\") pod \"certified-operators-5nrrg\" (UID: \"e78c8f6a-3cd2-4fbc-af78-3fbaba41070c\") " pod="openshift-marketplace/certified-operators-5nrrg" Jan 27 08:06:36 crc kubenswrapper[4787]: I0127 08:06:36.334425 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mb2tf\" (UniqueName: \"kubernetes.io/projected/e78c8f6a-3cd2-4fbc-af78-3fbaba41070c-kube-api-access-mb2tf\") pod \"certified-operators-5nrrg\" (UID: \"e78c8f6a-3cd2-4fbc-af78-3fbaba41070c\") " pod="openshift-marketplace/certified-operators-5nrrg" Jan 27 08:06:36 crc kubenswrapper[4787]: I0127 08:06:36.334847 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e78c8f6a-3cd2-4fbc-af78-3fbaba41070c-utilities\") pod \"certified-operators-5nrrg\" (UID: \"e78c8f6a-3cd2-4fbc-af78-3fbaba41070c\") " 
pod="openshift-marketplace/certified-operators-5nrrg" Jan 27 08:06:36 crc kubenswrapper[4787]: I0127 08:06:36.335178 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e78c8f6a-3cd2-4fbc-af78-3fbaba41070c-catalog-content\") pod \"certified-operators-5nrrg\" (UID: \"e78c8f6a-3cd2-4fbc-af78-3fbaba41070c\") " pod="openshift-marketplace/certified-operators-5nrrg" Jan 27 08:06:36 crc kubenswrapper[4787]: I0127 08:06:36.366143 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mb2tf\" (UniqueName: \"kubernetes.io/projected/e78c8f6a-3cd2-4fbc-af78-3fbaba41070c-kube-api-access-mb2tf\") pod \"certified-operators-5nrrg\" (UID: \"e78c8f6a-3cd2-4fbc-af78-3fbaba41070c\") " pod="openshift-marketplace/certified-operators-5nrrg" Jan 27 08:06:36 crc kubenswrapper[4787]: I0127 08:06:36.449275 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-5nrrg" Jan 27 08:06:36 crc kubenswrapper[4787]: I0127 08:06:36.923264 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-5nrrg"] Jan 27 08:06:37 crc kubenswrapper[4787]: I0127 08:06:37.737785 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5nrrg" event={"ID":"e78c8f6a-3cd2-4fbc-af78-3fbaba41070c","Type":"ContainerStarted","Data":"53cd4e630003b6798c9e5e8039883a25323c89f632886d1ab420bebf4450bb8f"} Jan 27 08:06:38 crc kubenswrapper[4787]: I0127 08:06:38.746541 4787 generic.go:334] "Generic (PLEG): container finished" podID="e78c8f6a-3cd2-4fbc-af78-3fbaba41070c" containerID="b5e4cd5270b6e96f6cb6daa829d6d67b9d5b6ecd4d86e4f5b96e9b21d3b9dfd3" exitCode=0 Jan 27 08:06:38 crc kubenswrapper[4787]: I0127 08:06:38.746708 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5nrrg" event={"ID":"e78c8f6a-3cd2-4fbc-af78-3fbaba41070c","Type":"ContainerDied","Data":"b5e4cd5270b6e96f6cb6daa829d6d67b9d5b6ecd4d86e4f5b96e9b21d3b9dfd3"} Jan 27 08:06:39 crc kubenswrapper[4787]: I0127 08:06:39.766186 4787 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-86cb77c54b-vbzqb"] Jan 27 08:06:39 crc kubenswrapper[4787]: I0127 08:06:39.767225 4787 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="cert-manager/cert-manager-86cb77c54b-vbzqb" Jan 27 08:06:39 crc kubenswrapper[4787]: I0127 08:06:39.770861 4787 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-dockercfg-gqk8b" Jan 27 08:06:39 crc kubenswrapper[4787]: I0127 08:06:39.823643 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-27mhv\" (UniqueName: \"kubernetes.io/projected/2219abac-7659-407a-b36f-cd0160e70c25-kube-api-access-27mhv\") pod \"cert-manager-86cb77c54b-vbzqb\" (UID: \"2219abac-7659-407a-b36f-cd0160e70c25\") " pod="cert-manager/cert-manager-86cb77c54b-vbzqb" Jan 27 08:06:39 crc kubenswrapper[4787]: I0127 08:06:39.823681 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/2219abac-7659-407a-b36f-cd0160e70c25-bound-sa-token\") pod \"cert-manager-86cb77c54b-vbzqb\" (UID: \"2219abac-7659-407a-b36f-cd0160e70c25\") " pod="cert-manager/cert-manager-86cb77c54b-vbzqb" Jan 27 08:06:39 crc kubenswrapper[4787]: I0127 08:06:39.828130 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-86cb77c54b-vbzqb"] Jan 27 08:06:39 crc kubenswrapper[4787]: I0127 08:06:39.925246 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-27mhv\" (UniqueName: \"kubernetes.io/projected/2219abac-7659-407a-b36f-cd0160e70c25-kube-api-access-27mhv\") pod \"cert-manager-86cb77c54b-vbzqb\" (UID: \"2219abac-7659-407a-b36f-cd0160e70c25\") " pod="cert-manager/cert-manager-86cb77c54b-vbzqb" Jan 27 08:06:39 crc kubenswrapper[4787]: I0127 08:06:39.925299 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/2219abac-7659-407a-b36f-cd0160e70c25-bound-sa-token\") pod \"cert-manager-86cb77c54b-vbzqb\" (UID: \"2219abac-7659-407a-b36f-cd0160e70c25\") " pod="cert-manager/cert-manager-86cb77c54b-vbzqb" Jan 27 08:06:39 crc kubenswrapper[4787]: I0127 08:06:39.947918 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/2219abac-7659-407a-b36f-cd0160e70c25-bound-sa-token\") pod \"cert-manager-86cb77c54b-vbzqb\" (UID: \"2219abac-7659-407a-b36f-cd0160e70c25\") " pod="cert-manager/cert-manager-86cb77c54b-vbzqb" Jan 27 08:06:39 crc kubenswrapper[4787]: I0127 08:06:39.948784 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-27mhv\" (UniqueName: \"kubernetes.io/projected/2219abac-7659-407a-b36f-cd0160e70c25-kube-api-access-27mhv\") pod \"cert-manager-86cb77c54b-vbzqb\" (UID: \"2219abac-7659-407a-b36f-cd0160e70c25\") " pod="cert-manager/cert-manager-86cb77c54b-vbzqb" Jan 27 08:06:40 crc kubenswrapper[4787]: I0127 08:06:40.138323 4787 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="cert-manager/cert-manager-86cb77c54b-vbzqb" Jan 27 08:06:40 crc kubenswrapper[4787]: I0127 08:06:40.574759 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-86cb77c54b-vbzqb"] Jan 27 08:06:40 crc kubenswrapper[4787]: W0127 08:06:40.577797 4787 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2219abac_7659_407a_b36f_cd0160e70c25.slice/crio-7b58213c72e31a3d918d39506e7cee3aec8c402df4d927ef8924d3623b10494c WatchSource:0}: Error finding container 7b58213c72e31a3d918d39506e7cee3aec8c402df4d927ef8924d3623b10494c: Status 404 returned error can't find the container with id 7b58213c72e31a3d918d39506e7cee3aec8c402df4d927ef8924d3623b10494c Jan 27 08:06:40 crc kubenswrapper[4787]: I0127 08:06:40.768613 4787 generic.go:334] "Generic (PLEG): container finished" podID="e78c8f6a-3cd2-4fbc-af78-3fbaba41070c" containerID="08b4f62655ac0067d486c33926cf62c3c39cdc3fe5830d59c54b976d0643b999" exitCode=0 Jan 27 08:06:40 crc kubenswrapper[4787]: I0127 08:06:40.768659 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5nrrg" event={"ID":"e78c8f6a-3cd2-4fbc-af78-3fbaba41070c","Type":"ContainerDied","Data":"08b4f62655ac0067d486c33926cf62c3c39cdc3fe5830d59c54b976d0643b999"} Jan 27 08:06:40 crc kubenswrapper[4787]: I0127 08:06:40.770543 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-86cb77c54b-vbzqb" event={"ID":"2219abac-7659-407a-b36f-cd0160e70c25","Type":"ContainerStarted","Data":"5debd36c6aeca9de3a9e1f468c032b83891e65e03e7a9cf75e4bc2f6ec4e4063"} Jan 27 08:06:40 crc kubenswrapper[4787]: I0127 08:06:40.770601 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-86cb77c54b-vbzqb" event={"ID":"2219abac-7659-407a-b36f-cd0160e70c25","Type":"ContainerStarted","Data":"7b58213c72e31a3d918d39506e7cee3aec8c402df4d927ef8924d3623b10494c"} Jan 27 08:06:40 crc kubenswrapper[4787]: I0127 08:06:40.821683 4787 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-86cb77c54b-vbzqb" podStartSLOduration=1.821651228 podStartE2EDuration="1.821651228s" podCreationTimestamp="2026-01-27 08:06:39 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-27 08:06:40.813478592 +0000 UTC m=+906.465834094" watchObservedRunningTime="2026-01-27 08:06:40.821651228 +0000 UTC m=+906.474006750" Jan 27 08:06:42 crc kubenswrapper[4787]: I0127 08:06:42.787482 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5nrrg" event={"ID":"e78c8f6a-3cd2-4fbc-af78-3fbaba41070c","Type":"ContainerStarted","Data":"4d80e93e40acb24a06e931eb2842625cf5854351ef02789c3d3a51d8155a8f6e"} Jan 27 08:06:42 crc kubenswrapper[4787]: I0127 08:06:42.808956 4787 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-5nrrg" podStartSLOduration=3.371942396 podStartE2EDuration="6.808924206s" podCreationTimestamp="2026-01-27 08:06:36 +0000 UTC" firstStartedPulling="2026-01-27 08:06:38.749173196 +0000 UTC m=+904.401528688" lastFinishedPulling="2026-01-27 08:06:42.186155006 +0000 UTC m=+907.838510498" observedRunningTime="2026-01-27 08:06:42.804517745 +0000 UTC m=+908.456873247" watchObservedRunningTime="2026-01-27 08:06:42.808924206 +0000 UTC m=+908.461279698" Jan 27 08:06:43 crc 
kubenswrapper[4787]: I0127 08:06:43.313518 4787 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="cert-manager/cert-manager-webhook-f4fb5df64-lfhld" Jan 27 08:06:46 crc kubenswrapper[4787]: I0127 08:06:46.450047 4787 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-5nrrg" Jan 27 08:06:46 crc kubenswrapper[4787]: I0127 08:06:46.450509 4787 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-5nrrg" Jan 27 08:06:46 crc kubenswrapper[4787]: I0127 08:06:46.535471 4787 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-5nrrg" Jan 27 08:06:46 crc kubenswrapper[4787]: I0127 08:06:46.984674 4787 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-index-jlpfk"] Jan 27 08:06:46 crc kubenswrapper[4787]: I0127 08:06:46.986730 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-jlpfk" Jan 27 08:06:46 crc kubenswrapper[4787]: I0127 08:06:46.990209 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"kube-root-ca.crt" Jan 27 08:06:46 crc kubenswrapper[4787]: I0127 08:06:46.990267 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"openshift-service-ca.crt" Jan 27 08:06:46 crc kubenswrapper[4787]: I0127 08:06:46.990439 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-index-dockercfg-l8j6d" Jan 27 08:06:46 crc kubenswrapper[4787]: I0127 08:06:46.994360 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-jlpfk"] Jan 27 08:06:47 crc kubenswrapper[4787]: I0127 08:06:47.062764 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zv7q6\" (UniqueName: \"kubernetes.io/projected/c3c0535e-f30f-45de-9722-9431520772a2-kube-api-access-zv7q6\") pod \"openstack-operator-index-jlpfk\" (UID: \"c3c0535e-f30f-45de-9722-9431520772a2\") " pod="openstack-operators/openstack-operator-index-jlpfk" Jan 27 08:06:47 crc kubenswrapper[4787]: I0127 08:06:47.164182 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zv7q6\" (UniqueName: \"kubernetes.io/projected/c3c0535e-f30f-45de-9722-9431520772a2-kube-api-access-zv7q6\") pod \"openstack-operator-index-jlpfk\" (UID: \"c3c0535e-f30f-45de-9722-9431520772a2\") " pod="openstack-operators/openstack-operator-index-jlpfk" Jan 27 08:06:47 crc kubenswrapper[4787]: I0127 08:06:47.190083 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zv7q6\" (UniqueName: \"kubernetes.io/projected/c3c0535e-f30f-45de-9722-9431520772a2-kube-api-access-zv7q6\") pod \"openstack-operator-index-jlpfk\" (UID: \"c3c0535e-f30f-45de-9722-9431520772a2\") " pod="openstack-operators/openstack-operator-index-jlpfk" Jan 27 08:06:47 crc kubenswrapper[4787]: I0127 08:06:47.312623 4787 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-jlpfk" Jan 27 08:06:47 crc kubenswrapper[4787]: I0127 08:06:47.758538 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-jlpfk"] Jan 27 08:06:47 crc kubenswrapper[4787]: I0127 08:06:47.828178 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-jlpfk" event={"ID":"c3c0535e-f30f-45de-9722-9431520772a2","Type":"ContainerStarted","Data":"9cf1698799f8600b465f5d25e75ac44f2338a58601c45a62f35b4c50e4a809ef"} Jan 27 08:06:50 crc kubenswrapper[4787]: I0127 08:06:50.854530 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-jlpfk" event={"ID":"c3c0535e-f30f-45de-9722-9431520772a2","Type":"ContainerStarted","Data":"2ae36a8c3cc95644299c340187a4c23740eecc3771632126a61345b654a07a93"} Jan 27 08:06:50 crc kubenswrapper[4787]: I0127 08:06:50.870156 4787 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-index-jlpfk" podStartSLOduration=2.798281283 podStartE2EDuration="4.870135846s" podCreationTimestamp="2026-01-27 08:06:46 +0000 UTC" firstStartedPulling="2026-01-27 08:06:47.764645828 +0000 UTC m=+913.417001320" lastFinishedPulling="2026-01-27 08:06:49.836500381 +0000 UTC m=+915.488855883" observedRunningTime="2026-01-27 08:06:50.868802231 +0000 UTC m=+916.521157743" watchObservedRunningTime="2026-01-27 08:06:50.870135846 +0000 UTC m=+916.522491348" Jan 27 08:06:51 crc kubenswrapper[4787]: I0127 08:06:51.182229 4787 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/openstack-operator-index-jlpfk"] Jan 27 08:06:51 crc kubenswrapper[4787]: I0127 08:06:51.785757 4787 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-index-cnfx4"] Jan 27 08:06:51 crc kubenswrapper[4787]: I0127 08:06:51.786800 4787 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-cnfx4" Jan 27 08:06:51 crc kubenswrapper[4787]: I0127 08:06:51.802596 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-cnfx4"] Jan 27 08:06:51 crc kubenswrapper[4787]: I0127 08:06:51.848541 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b8pc4\" (UniqueName: \"kubernetes.io/projected/ba39dacd-d1da-4a2b-a4b9-fa0e917986f0-kube-api-access-b8pc4\") pod \"openstack-operator-index-cnfx4\" (UID: \"ba39dacd-d1da-4a2b-a4b9-fa0e917986f0\") " pod="openstack-operators/openstack-operator-index-cnfx4" Jan 27 08:06:51 crc kubenswrapper[4787]: I0127 08:06:51.949698 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b8pc4\" (UniqueName: \"kubernetes.io/projected/ba39dacd-d1da-4a2b-a4b9-fa0e917986f0-kube-api-access-b8pc4\") pod \"openstack-operator-index-cnfx4\" (UID: \"ba39dacd-d1da-4a2b-a4b9-fa0e917986f0\") " pod="openstack-operators/openstack-operator-index-cnfx4" Jan 27 08:06:51 crc kubenswrapper[4787]: I0127 08:06:51.974530 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b8pc4\" (UniqueName: \"kubernetes.io/projected/ba39dacd-d1da-4a2b-a4b9-fa0e917986f0-kube-api-access-b8pc4\") pod \"openstack-operator-index-cnfx4\" (UID: \"ba39dacd-d1da-4a2b-a4b9-fa0e917986f0\") " pod="openstack-operators/openstack-operator-index-cnfx4" Jan 27 08:06:52 crc kubenswrapper[4787]: I0127 08:06:52.113154 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-cnfx4" Jan 27 08:06:52 crc kubenswrapper[4787]: I0127 08:06:52.352223 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-cnfx4"] Jan 27 08:06:52 crc kubenswrapper[4787]: I0127 08:06:52.869626 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-cnfx4" event={"ID":"ba39dacd-d1da-4a2b-a4b9-fa0e917986f0","Type":"ContainerStarted","Data":"24b2ba488be9122dc0ac8416758dc8226f35009874f61b62bbecc3dbfdde372d"} Jan 27 08:06:52 crc kubenswrapper[4787]: I0127 08:06:52.869962 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-cnfx4" event={"ID":"ba39dacd-d1da-4a2b-a4b9-fa0e917986f0","Type":"ContainerStarted","Data":"189b45478376fa558633434a444ff60215cef664cdc5b40d3af9b5c540b3f261"} Jan 27 08:06:52 crc kubenswrapper[4787]: I0127 08:06:52.869774 4787 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/openstack-operator-index-jlpfk" podUID="c3c0535e-f30f-45de-9722-9431520772a2" containerName="registry-server" containerID="cri-o://2ae36a8c3cc95644299c340187a4c23740eecc3771632126a61345b654a07a93" gracePeriod=2 Jan 27 08:06:52 crc kubenswrapper[4787]: I0127 08:06:52.895491 4787 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-index-cnfx4" podStartSLOduration=1.838866024 podStartE2EDuration="1.895469774s" podCreationTimestamp="2026-01-27 08:06:51 +0000 UTC" firstStartedPulling="2026-01-27 08:06:52.382192254 +0000 UTC m=+918.034547746" lastFinishedPulling="2026-01-27 08:06:52.438795994 +0000 UTC m=+918.091151496" observedRunningTime="2026-01-27 08:06:52.893160342 +0000 UTC m=+918.545515864" watchObservedRunningTime="2026-01-27 08:06:52.895469774 +0000 UTC m=+918.547825266" 
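
The two "Observed pod startup duration" records above come from the kubelet's pod_startup_latency_tracker. Comparing the fields, podStartE2EDuration covers pod creation through the first observed running state, while podStartSLOduration works out to the same interval minus the image-pull window (firstStartedPulling to lastFinishedPulling); for openstack-operator-index-jlpfk that is roughly 4.870s minus 2.072s of pulling, which lines up with the reported 2.798s. Below is a minimal sketch, assuming plain journalctl -u kubelet output as input, for extracting those figures; the field names in the regex are copied from the records above, while the script and the file name used in the usage line are illustrative assumptions, not part of any shipped tooling.

#!/usr/bin/env python3
# Illustrative sketch, not kubelet tooling: parses "Observed pod startup duration"
# records out of kubelet journal lines. Field names are taken verbatim from the
# log entries above; everything else is an assumption for demonstration.
import re
import sys

PATTERN = re.compile(
    r'"Observed pod startup duration" pod="(?P<pod>[^"]+)"'
    r' podStartSLOduration=(?P<slo>[0-9.]+)'
    r' podStartE2EDuration="(?P<e2e>[^"]+)"'
)

def main() -> None:
    # Read journal lines from stdin and print one summary line per matching record.
    for line in sys.stdin:
        m = PATTERN.search(line)
        if m:
            # slo excludes the image-pull window, e2e does not (see the arithmetic above).
            print(f"{m.group('pod')}: slo={m.group('slo')}s e2e={m.group('e2e')}")

if __name__ == "__main__":
    main()

Usage would be along the lines of: journalctl -u kubelet | python3 startup_durations.py, where startup_durations.py is whatever name the sketch above is saved under.
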
Jan 27 08:06:53 crc kubenswrapper[4787]: I0127 08:06:53.280435 4787 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-jlpfk" Jan 27 08:06:53 crc kubenswrapper[4787]: I0127 08:06:53.377504 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zv7q6\" (UniqueName: \"kubernetes.io/projected/c3c0535e-f30f-45de-9722-9431520772a2-kube-api-access-zv7q6\") pod \"c3c0535e-f30f-45de-9722-9431520772a2\" (UID: \"c3c0535e-f30f-45de-9722-9431520772a2\") " Jan 27 08:06:53 crc kubenswrapper[4787]: I0127 08:06:53.385717 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c3c0535e-f30f-45de-9722-9431520772a2-kube-api-access-zv7q6" (OuterVolumeSpecName: "kube-api-access-zv7q6") pod "c3c0535e-f30f-45de-9722-9431520772a2" (UID: "c3c0535e-f30f-45de-9722-9431520772a2"). InnerVolumeSpecName "kube-api-access-zv7q6". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 27 08:06:53 crc kubenswrapper[4787]: I0127 08:06:53.479412 4787 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zv7q6\" (UniqueName: \"kubernetes.io/projected/c3c0535e-f30f-45de-9722-9431520772a2-kube-api-access-zv7q6\") on node \"crc\" DevicePath \"\"" Jan 27 08:06:53 crc kubenswrapper[4787]: I0127 08:06:53.879250 4787 generic.go:334] "Generic (PLEG): container finished" podID="c3c0535e-f30f-45de-9722-9431520772a2" containerID="2ae36a8c3cc95644299c340187a4c23740eecc3771632126a61345b654a07a93" exitCode=0 Jan 27 08:06:53 crc kubenswrapper[4787]: I0127 08:06:53.879357 4787 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-jlpfk" Jan 27 08:06:53 crc kubenswrapper[4787]: I0127 08:06:53.879368 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-jlpfk" event={"ID":"c3c0535e-f30f-45de-9722-9431520772a2","Type":"ContainerDied","Data":"2ae36a8c3cc95644299c340187a4c23740eecc3771632126a61345b654a07a93"} Jan 27 08:06:53 crc kubenswrapper[4787]: I0127 08:06:53.879454 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-jlpfk" event={"ID":"c3c0535e-f30f-45de-9722-9431520772a2","Type":"ContainerDied","Data":"9cf1698799f8600b465f5d25e75ac44f2338a58601c45a62f35b4c50e4a809ef"} Jan 27 08:06:53 crc kubenswrapper[4787]: I0127 08:06:53.879485 4787 scope.go:117] "RemoveContainer" containerID="2ae36a8c3cc95644299c340187a4c23740eecc3771632126a61345b654a07a93" Jan 27 08:06:53 crc kubenswrapper[4787]: I0127 08:06:53.901905 4787 scope.go:117] "RemoveContainer" containerID="2ae36a8c3cc95644299c340187a4c23740eecc3771632126a61345b654a07a93" Jan 27 08:06:53 crc kubenswrapper[4787]: E0127 08:06:53.902442 4787 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2ae36a8c3cc95644299c340187a4c23740eecc3771632126a61345b654a07a93\": container with ID starting with 2ae36a8c3cc95644299c340187a4c23740eecc3771632126a61345b654a07a93 not found: ID does not exist" containerID="2ae36a8c3cc95644299c340187a4c23740eecc3771632126a61345b654a07a93" Jan 27 08:06:53 crc kubenswrapper[4787]: I0127 08:06:53.902520 4787 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2ae36a8c3cc95644299c340187a4c23740eecc3771632126a61345b654a07a93"} err="failed to get container status 
\"2ae36a8c3cc95644299c340187a4c23740eecc3771632126a61345b654a07a93\": rpc error: code = NotFound desc = could not find container \"2ae36a8c3cc95644299c340187a4c23740eecc3771632126a61345b654a07a93\": container with ID starting with 2ae36a8c3cc95644299c340187a4c23740eecc3771632126a61345b654a07a93 not found: ID does not exist" Jan 27 08:06:53 crc kubenswrapper[4787]: I0127 08:06:53.918184 4787 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/openstack-operator-index-jlpfk"] Jan 27 08:06:53 crc kubenswrapper[4787]: I0127 08:06:53.924487 4787 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/openstack-operator-index-jlpfk"] Jan 27 08:06:55 crc kubenswrapper[4787]: I0127 08:06:55.086103 4787 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c3c0535e-f30f-45de-9722-9431520772a2" path="/var/lib/kubelet/pods/c3c0535e-f30f-45de-9722-9431520772a2/volumes" Jan 27 08:06:56 crc kubenswrapper[4787]: I0127 08:06:56.497512 4787 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-5nrrg" Jan 27 08:06:58 crc kubenswrapper[4787]: I0127 08:06:58.771532 4787 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-5nrrg"] Jan 27 08:06:58 crc kubenswrapper[4787]: I0127 08:06:58.772303 4787 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-5nrrg" podUID="e78c8f6a-3cd2-4fbc-af78-3fbaba41070c" containerName="registry-server" containerID="cri-o://4d80e93e40acb24a06e931eb2842625cf5854351ef02789c3d3a51d8155a8f6e" gracePeriod=2 Jan 27 08:06:58 crc kubenswrapper[4787]: I0127 08:06:58.922562 4787 generic.go:334] "Generic (PLEG): container finished" podID="e78c8f6a-3cd2-4fbc-af78-3fbaba41070c" containerID="4d80e93e40acb24a06e931eb2842625cf5854351ef02789c3d3a51d8155a8f6e" exitCode=0 Jan 27 08:06:58 crc kubenswrapper[4787]: I0127 08:06:58.922623 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5nrrg" event={"ID":"e78c8f6a-3cd2-4fbc-af78-3fbaba41070c","Type":"ContainerDied","Data":"4d80e93e40acb24a06e931eb2842625cf5854351ef02789c3d3a51d8155a8f6e"} Jan 27 08:06:59 crc kubenswrapper[4787]: I0127 08:06:59.206427 4787 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-5nrrg" Jan 27 08:06:59 crc kubenswrapper[4787]: I0127 08:06:59.261791 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e78c8f6a-3cd2-4fbc-af78-3fbaba41070c-utilities\") pod \"e78c8f6a-3cd2-4fbc-af78-3fbaba41070c\" (UID: \"e78c8f6a-3cd2-4fbc-af78-3fbaba41070c\") " Jan 27 08:06:59 crc kubenswrapper[4787]: I0127 08:06:59.261925 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mb2tf\" (UniqueName: \"kubernetes.io/projected/e78c8f6a-3cd2-4fbc-af78-3fbaba41070c-kube-api-access-mb2tf\") pod \"e78c8f6a-3cd2-4fbc-af78-3fbaba41070c\" (UID: \"e78c8f6a-3cd2-4fbc-af78-3fbaba41070c\") " Jan 27 08:06:59 crc kubenswrapper[4787]: I0127 08:06:59.262443 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e78c8f6a-3cd2-4fbc-af78-3fbaba41070c-catalog-content\") pod \"e78c8f6a-3cd2-4fbc-af78-3fbaba41070c\" (UID: \"e78c8f6a-3cd2-4fbc-af78-3fbaba41070c\") " Jan 27 08:06:59 crc kubenswrapper[4787]: I0127 08:06:59.263038 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e78c8f6a-3cd2-4fbc-af78-3fbaba41070c-utilities" (OuterVolumeSpecName: "utilities") pod "e78c8f6a-3cd2-4fbc-af78-3fbaba41070c" (UID: "e78c8f6a-3cd2-4fbc-af78-3fbaba41070c"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 27 08:06:59 crc kubenswrapper[4787]: I0127 08:06:59.270064 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e78c8f6a-3cd2-4fbc-af78-3fbaba41070c-kube-api-access-mb2tf" (OuterVolumeSpecName: "kube-api-access-mb2tf") pod "e78c8f6a-3cd2-4fbc-af78-3fbaba41070c" (UID: "e78c8f6a-3cd2-4fbc-af78-3fbaba41070c"). InnerVolumeSpecName "kube-api-access-mb2tf". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 27 08:06:59 crc kubenswrapper[4787]: I0127 08:06:59.312778 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e78c8f6a-3cd2-4fbc-af78-3fbaba41070c-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "e78c8f6a-3cd2-4fbc-af78-3fbaba41070c" (UID: "e78c8f6a-3cd2-4fbc-af78-3fbaba41070c"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 27 08:06:59 crc kubenswrapper[4787]: I0127 08:06:59.364410 4787 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e78c8f6a-3cd2-4fbc-af78-3fbaba41070c-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 27 08:06:59 crc kubenswrapper[4787]: I0127 08:06:59.364457 4787 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e78c8f6a-3cd2-4fbc-af78-3fbaba41070c-utilities\") on node \"crc\" DevicePath \"\"" Jan 27 08:06:59 crc kubenswrapper[4787]: I0127 08:06:59.364471 4787 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mb2tf\" (UniqueName: \"kubernetes.io/projected/e78c8f6a-3cd2-4fbc-af78-3fbaba41070c-kube-api-access-mb2tf\") on node \"crc\" DevicePath \"\"" Jan 27 08:06:59 crc kubenswrapper[4787]: I0127 08:06:59.933168 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5nrrg" event={"ID":"e78c8f6a-3cd2-4fbc-af78-3fbaba41070c","Type":"ContainerDied","Data":"53cd4e630003b6798c9e5e8039883a25323c89f632886d1ab420bebf4450bb8f"} Jan 27 08:06:59 crc kubenswrapper[4787]: I0127 08:06:59.933266 4787 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-5nrrg" Jan 27 08:06:59 crc kubenswrapper[4787]: I0127 08:06:59.933721 4787 scope.go:117] "RemoveContainer" containerID="4d80e93e40acb24a06e931eb2842625cf5854351ef02789c3d3a51d8155a8f6e" Jan 27 08:06:59 crc kubenswrapper[4787]: I0127 08:06:59.966842 4787 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-5nrrg"] Jan 27 08:06:59 crc kubenswrapper[4787]: I0127 08:06:59.978525 4787 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-5nrrg"] Jan 27 08:07:00 crc kubenswrapper[4787]: I0127 08:07:00.125649 4787 scope.go:117] "RemoveContainer" containerID="08b4f62655ac0067d486c33926cf62c3c39cdc3fe5830d59c54b976d0643b999" Jan 27 08:07:00 crc kubenswrapper[4787]: I0127 08:07:00.143648 4787 scope.go:117] "RemoveContainer" containerID="b5e4cd5270b6e96f6cb6daa829d6d67b9d5b6ecd4d86e4f5b96e9b21d3b9dfd3" Jan 27 08:07:01 crc kubenswrapper[4787]: I0127 08:07:01.090021 4787 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e78c8f6a-3cd2-4fbc-af78-3fbaba41070c" path="/var/lib/kubelet/pods/e78c8f6a-3cd2-4fbc-af78-3fbaba41070c/volumes" Jan 27 08:07:02 crc kubenswrapper[4787]: I0127 08:07:02.113750 4787 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-operators/openstack-operator-index-cnfx4" Jan 27 08:07:02 crc kubenswrapper[4787]: I0127 08:07:02.114860 4787 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-index-cnfx4" Jan 27 08:07:02 crc kubenswrapper[4787]: I0127 08:07:02.157114 4787 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-operators/openstack-operator-index-cnfx4" Jan 27 08:07:02 crc kubenswrapper[4787]: I0127 08:07:02.986523 4787 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-index-cnfx4" Jan 27 08:07:10 crc kubenswrapper[4787]: I0127 08:07:10.511729 4787 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/fd3dfdf443b13861ec8513a19ccfc4147bdcd20f329cc4d382b7652cb7zp96g"] Jan 27 08:07:10 crc kubenswrapper[4787]: E0127 
08:07:10.512989 4787 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e78c8f6a-3cd2-4fbc-af78-3fbaba41070c" containerName="registry-server" Jan 27 08:07:10 crc kubenswrapper[4787]: I0127 08:07:10.513017 4787 state_mem.go:107] "Deleted CPUSet assignment" podUID="e78c8f6a-3cd2-4fbc-af78-3fbaba41070c" containerName="registry-server" Jan 27 08:07:10 crc kubenswrapper[4787]: E0127 08:07:10.513048 4787 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c3c0535e-f30f-45de-9722-9431520772a2" containerName="registry-server" Jan 27 08:07:10 crc kubenswrapper[4787]: I0127 08:07:10.513057 4787 state_mem.go:107] "Deleted CPUSet assignment" podUID="c3c0535e-f30f-45de-9722-9431520772a2" containerName="registry-server" Jan 27 08:07:10 crc kubenswrapper[4787]: E0127 08:07:10.513071 4787 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e78c8f6a-3cd2-4fbc-af78-3fbaba41070c" containerName="extract-content" Jan 27 08:07:10 crc kubenswrapper[4787]: I0127 08:07:10.513080 4787 state_mem.go:107] "Deleted CPUSet assignment" podUID="e78c8f6a-3cd2-4fbc-af78-3fbaba41070c" containerName="extract-content" Jan 27 08:07:10 crc kubenswrapper[4787]: E0127 08:07:10.513097 4787 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e78c8f6a-3cd2-4fbc-af78-3fbaba41070c" containerName="extract-utilities" Jan 27 08:07:10 crc kubenswrapper[4787]: I0127 08:07:10.513105 4787 state_mem.go:107] "Deleted CPUSet assignment" podUID="e78c8f6a-3cd2-4fbc-af78-3fbaba41070c" containerName="extract-utilities" Jan 27 08:07:10 crc kubenswrapper[4787]: I0127 08:07:10.513295 4787 memory_manager.go:354] "RemoveStaleState removing state" podUID="e78c8f6a-3cd2-4fbc-af78-3fbaba41070c" containerName="registry-server" Jan 27 08:07:10 crc kubenswrapper[4787]: I0127 08:07:10.513339 4787 memory_manager.go:354] "RemoveStaleState removing state" podUID="c3c0535e-f30f-45de-9722-9431520772a2" containerName="registry-server" Jan 27 08:07:10 crc kubenswrapper[4787]: I0127 08:07:10.515309 4787 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/fd3dfdf443b13861ec8513a19ccfc4147bdcd20f329cc4d382b7652cb7zp96g" Jan 27 08:07:10 crc kubenswrapper[4787]: I0127 08:07:10.518854 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"default-dockercfg-7z9mk" Jan 27 08:07:10 crc kubenswrapper[4787]: I0127 08:07:10.522387 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/fd3dfdf443b13861ec8513a19ccfc4147bdcd20f329cc4d382b7652cb7zp96g"] Jan 27 08:07:10 crc kubenswrapper[4787]: I0127 08:07:10.557275 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rm2lq\" (UniqueName: \"kubernetes.io/projected/f63e7c4f-d194-4209-a8ac-f67c9dc7dde1-kube-api-access-rm2lq\") pod \"fd3dfdf443b13861ec8513a19ccfc4147bdcd20f329cc4d382b7652cb7zp96g\" (UID: \"f63e7c4f-d194-4209-a8ac-f67c9dc7dde1\") " pod="openstack-operators/fd3dfdf443b13861ec8513a19ccfc4147bdcd20f329cc4d382b7652cb7zp96g" Jan 27 08:07:10 crc kubenswrapper[4787]: I0127 08:07:10.557521 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/f63e7c4f-d194-4209-a8ac-f67c9dc7dde1-bundle\") pod \"fd3dfdf443b13861ec8513a19ccfc4147bdcd20f329cc4d382b7652cb7zp96g\" (UID: \"f63e7c4f-d194-4209-a8ac-f67c9dc7dde1\") " pod="openstack-operators/fd3dfdf443b13861ec8513a19ccfc4147bdcd20f329cc4d382b7652cb7zp96g" Jan 27 08:07:10 crc kubenswrapper[4787]: I0127 08:07:10.557667 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/f63e7c4f-d194-4209-a8ac-f67c9dc7dde1-util\") pod \"fd3dfdf443b13861ec8513a19ccfc4147bdcd20f329cc4d382b7652cb7zp96g\" (UID: \"f63e7c4f-d194-4209-a8ac-f67c9dc7dde1\") " pod="openstack-operators/fd3dfdf443b13861ec8513a19ccfc4147bdcd20f329cc4d382b7652cb7zp96g" Jan 27 08:07:10 crc kubenswrapper[4787]: I0127 08:07:10.659829 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rm2lq\" (UniqueName: \"kubernetes.io/projected/f63e7c4f-d194-4209-a8ac-f67c9dc7dde1-kube-api-access-rm2lq\") pod \"fd3dfdf443b13861ec8513a19ccfc4147bdcd20f329cc4d382b7652cb7zp96g\" (UID: \"f63e7c4f-d194-4209-a8ac-f67c9dc7dde1\") " pod="openstack-operators/fd3dfdf443b13861ec8513a19ccfc4147bdcd20f329cc4d382b7652cb7zp96g" Jan 27 08:07:10 crc kubenswrapper[4787]: I0127 08:07:10.659992 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/f63e7c4f-d194-4209-a8ac-f67c9dc7dde1-bundle\") pod \"fd3dfdf443b13861ec8513a19ccfc4147bdcd20f329cc4d382b7652cb7zp96g\" (UID: \"f63e7c4f-d194-4209-a8ac-f67c9dc7dde1\") " pod="openstack-operators/fd3dfdf443b13861ec8513a19ccfc4147bdcd20f329cc4d382b7652cb7zp96g" Jan 27 08:07:10 crc kubenswrapper[4787]: I0127 08:07:10.660054 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/f63e7c4f-d194-4209-a8ac-f67c9dc7dde1-util\") pod \"fd3dfdf443b13861ec8513a19ccfc4147bdcd20f329cc4d382b7652cb7zp96g\" (UID: \"f63e7c4f-d194-4209-a8ac-f67c9dc7dde1\") " pod="openstack-operators/fd3dfdf443b13861ec8513a19ccfc4147bdcd20f329cc4d382b7652cb7zp96g" Jan 27 08:07:10 crc kubenswrapper[4787]: I0127 08:07:10.661136 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: 
\"kubernetes.io/empty-dir/f63e7c4f-d194-4209-a8ac-f67c9dc7dde1-util\") pod \"fd3dfdf443b13861ec8513a19ccfc4147bdcd20f329cc4d382b7652cb7zp96g\" (UID: \"f63e7c4f-d194-4209-a8ac-f67c9dc7dde1\") " pod="openstack-operators/fd3dfdf443b13861ec8513a19ccfc4147bdcd20f329cc4d382b7652cb7zp96g" Jan 27 08:07:10 crc kubenswrapper[4787]: I0127 08:07:10.661141 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/f63e7c4f-d194-4209-a8ac-f67c9dc7dde1-bundle\") pod \"fd3dfdf443b13861ec8513a19ccfc4147bdcd20f329cc4d382b7652cb7zp96g\" (UID: \"f63e7c4f-d194-4209-a8ac-f67c9dc7dde1\") " pod="openstack-operators/fd3dfdf443b13861ec8513a19ccfc4147bdcd20f329cc4d382b7652cb7zp96g" Jan 27 08:07:10 crc kubenswrapper[4787]: I0127 08:07:10.685142 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rm2lq\" (UniqueName: \"kubernetes.io/projected/f63e7c4f-d194-4209-a8ac-f67c9dc7dde1-kube-api-access-rm2lq\") pod \"fd3dfdf443b13861ec8513a19ccfc4147bdcd20f329cc4d382b7652cb7zp96g\" (UID: \"f63e7c4f-d194-4209-a8ac-f67c9dc7dde1\") " pod="openstack-operators/fd3dfdf443b13861ec8513a19ccfc4147bdcd20f329cc4d382b7652cb7zp96g" Jan 27 08:07:10 crc kubenswrapper[4787]: I0127 08:07:10.836459 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/fd3dfdf443b13861ec8513a19ccfc4147bdcd20f329cc4d382b7652cb7zp96g" Jan 27 08:07:11 crc kubenswrapper[4787]: I0127 08:07:11.355878 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/fd3dfdf443b13861ec8513a19ccfc4147bdcd20f329cc4d382b7652cb7zp96g"] Jan 27 08:07:12 crc kubenswrapper[4787]: I0127 08:07:12.020474 4787 generic.go:334] "Generic (PLEG): container finished" podID="f63e7c4f-d194-4209-a8ac-f67c9dc7dde1" containerID="a9f1fd61431cb95f96b0c4b871f3a6bb8ea404beadec1c1139de236e15c36b11" exitCode=0 Jan 27 08:07:12 crc kubenswrapper[4787]: I0127 08:07:12.020618 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/fd3dfdf443b13861ec8513a19ccfc4147bdcd20f329cc4d382b7652cb7zp96g" event={"ID":"f63e7c4f-d194-4209-a8ac-f67c9dc7dde1","Type":"ContainerDied","Data":"a9f1fd61431cb95f96b0c4b871f3a6bb8ea404beadec1c1139de236e15c36b11"} Jan 27 08:07:12 crc kubenswrapper[4787]: I0127 08:07:12.020708 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/fd3dfdf443b13861ec8513a19ccfc4147bdcd20f329cc4d382b7652cb7zp96g" event={"ID":"f63e7c4f-d194-4209-a8ac-f67c9dc7dde1","Type":"ContainerStarted","Data":"1db295cc6731d3f08dfb5af5f597d84207441910100091f0cea6351873c9986d"} Jan 27 08:07:13 crc kubenswrapper[4787]: I0127 08:07:13.053531 4787 generic.go:334] "Generic (PLEG): container finished" podID="f63e7c4f-d194-4209-a8ac-f67c9dc7dde1" containerID="a4074cf2dabef95322eb20a835884d5b51098cda722a7c7f95a1971f131bf0a9" exitCode=0 Jan 27 08:07:13 crc kubenswrapper[4787]: I0127 08:07:13.054778 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/fd3dfdf443b13861ec8513a19ccfc4147bdcd20f329cc4d382b7652cb7zp96g" event={"ID":"f63e7c4f-d194-4209-a8ac-f67c9dc7dde1","Type":"ContainerDied","Data":"a4074cf2dabef95322eb20a835884d5b51098cda722a7c7f95a1971f131bf0a9"} Jan 27 08:07:14 crc kubenswrapper[4787]: I0127 08:07:14.065491 4787 generic.go:334] "Generic (PLEG): container finished" podID="f63e7c4f-d194-4209-a8ac-f67c9dc7dde1" containerID="9a028a61db959db2d5e788fbb0261eddaa148c4f87d38dfac8bb17301f06f374" exitCode=0 Jan 27 08:07:14 crc kubenswrapper[4787]: I0127 08:07:14.065615 4787 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/fd3dfdf443b13861ec8513a19ccfc4147bdcd20f329cc4d382b7652cb7zp96g" event={"ID":"f63e7c4f-d194-4209-a8ac-f67c9dc7dde1","Type":"ContainerDied","Data":"9a028a61db959db2d5e788fbb0261eddaa148c4f87d38dfac8bb17301f06f374"} Jan 27 08:07:15 crc kubenswrapper[4787]: I0127 08:07:15.385911 4787 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/fd3dfdf443b13861ec8513a19ccfc4147bdcd20f329cc4d382b7652cb7zp96g" Jan 27 08:07:15 crc kubenswrapper[4787]: I0127 08:07:15.432576 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/f63e7c4f-d194-4209-a8ac-f67c9dc7dde1-util\") pod \"f63e7c4f-d194-4209-a8ac-f67c9dc7dde1\" (UID: \"f63e7c4f-d194-4209-a8ac-f67c9dc7dde1\") " Jan 27 08:07:15 crc kubenswrapper[4787]: I0127 08:07:15.432679 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rm2lq\" (UniqueName: \"kubernetes.io/projected/f63e7c4f-d194-4209-a8ac-f67c9dc7dde1-kube-api-access-rm2lq\") pod \"f63e7c4f-d194-4209-a8ac-f67c9dc7dde1\" (UID: \"f63e7c4f-d194-4209-a8ac-f67c9dc7dde1\") " Jan 27 08:07:15 crc kubenswrapper[4787]: I0127 08:07:15.432750 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/f63e7c4f-d194-4209-a8ac-f67c9dc7dde1-bundle\") pod \"f63e7c4f-d194-4209-a8ac-f67c9dc7dde1\" (UID: \"f63e7c4f-d194-4209-a8ac-f67c9dc7dde1\") " Jan 27 08:07:15 crc kubenswrapper[4787]: I0127 08:07:15.433910 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f63e7c4f-d194-4209-a8ac-f67c9dc7dde1-bundle" (OuterVolumeSpecName: "bundle") pod "f63e7c4f-d194-4209-a8ac-f67c9dc7dde1" (UID: "f63e7c4f-d194-4209-a8ac-f67c9dc7dde1"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 27 08:07:15 crc kubenswrapper[4787]: I0127 08:07:15.441060 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f63e7c4f-d194-4209-a8ac-f67c9dc7dde1-kube-api-access-rm2lq" (OuterVolumeSpecName: "kube-api-access-rm2lq") pod "f63e7c4f-d194-4209-a8ac-f67c9dc7dde1" (UID: "f63e7c4f-d194-4209-a8ac-f67c9dc7dde1"). InnerVolumeSpecName "kube-api-access-rm2lq". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 27 08:07:15 crc kubenswrapper[4787]: I0127 08:07:15.452062 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f63e7c4f-d194-4209-a8ac-f67c9dc7dde1-util" (OuterVolumeSpecName: "util") pod "f63e7c4f-d194-4209-a8ac-f67c9dc7dde1" (UID: "f63e7c4f-d194-4209-a8ac-f67c9dc7dde1"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 27 08:07:15 crc kubenswrapper[4787]: I0127 08:07:15.533984 4787 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/f63e7c4f-d194-4209-a8ac-f67c9dc7dde1-util\") on node \"crc\" DevicePath \"\"" Jan 27 08:07:15 crc kubenswrapper[4787]: I0127 08:07:15.534392 4787 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rm2lq\" (UniqueName: \"kubernetes.io/projected/f63e7c4f-d194-4209-a8ac-f67c9dc7dde1-kube-api-access-rm2lq\") on node \"crc\" DevicePath \"\"" Jan 27 08:07:15 crc kubenswrapper[4787]: I0127 08:07:15.534467 4787 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/f63e7c4f-d194-4209-a8ac-f67c9dc7dde1-bundle\") on node \"crc\" DevicePath \"\"" Jan 27 08:07:16 crc kubenswrapper[4787]: I0127 08:07:16.080783 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/fd3dfdf443b13861ec8513a19ccfc4147bdcd20f329cc4d382b7652cb7zp96g" event={"ID":"f63e7c4f-d194-4209-a8ac-f67c9dc7dde1","Type":"ContainerDied","Data":"1db295cc6731d3f08dfb5af5f597d84207441910100091f0cea6351873c9986d"} Jan 27 08:07:16 crc kubenswrapper[4787]: I0127 08:07:16.080829 4787 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1db295cc6731d3f08dfb5af5f597d84207441910100091f0cea6351873c9986d" Jan 27 08:07:16 crc kubenswrapper[4787]: I0127 08:07:16.080886 4787 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/fd3dfdf443b13861ec8513a19ccfc4147bdcd20f329cc4d382b7652cb7zp96g" Jan 27 08:07:23 crc kubenswrapper[4787]: I0127 08:07:23.355754 4787 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-controller-init-59cc4f5964-ch4bq"] Jan 27 08:07:23 crc kubenswrapper[4787]: E0127 08:07:23.357401 4787 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f63e7c4f-d194-4209-a8ac-f67c9dc7dde1" containerName="util" Jan 27 08:07:23 crc kubenswrapper[4787]: I0127 08:07:23.357467 4787 state_mem.go:107] "Deleted CPUSet assignment" podUID="f63e7c4f-d194-4209-a8ac-f67c9dc7dde1" containerName="util" Jan 27 08:07:23 crc kubenswrapper[4787]: E0127 08:07:23.357505 4787 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f63e7c4f-d194-4209-a8ac-f67c9dc7dde1" containerName="pull" Jan 27 08:07:23 crc kubenswrapper[4787]: I0127 08:07:23.357512 4787 state_mem.go:107] "Deleted CPUSet assignment" podUID="f63e7c4f-d194-4209-a8ac-f67c9dc7dde1" containerName="pull" Jan 27 08:07:23 crc kubenswrapper[4787]: E0127 08:07:23.357527 4787 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f63e7c4f-d194-4209-a8ac-f67c9dc7dde1" containerName="extract" Jan 27 08:07:23 crc kubenswrapper[4787]: I0127 08:07:23.357536 4787 state_mem.go:107] "Deleted CPUSet assignment" podUID="f63e7c4f-d194-4209-a8ac-f67c9dc7dde1" containerName="extract" Jan 27 08:07:23 crc kubenswrapper[4787]: I0127 08:07:23.357862 4787 memory_manager.go:354] "RemoveStaleState removing state" podUID="f63e7c4f-d194-4209-a8ac-f67c9dc7dde1" containerName="extract" Jan 27 08:07:23 crc kubenswrapper[4787]: I0127 08:07:23.360540 4787 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-controller-init-59cc4f5964-ch4bq" Jan 27 08:07:23 crc kubenswrapper[4787]: I0127 08:07:23.364966 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-init-59cc4f5964-ch4bq"] Jan 27 08:07:23 crc kubenswrapper[4787]: I0127 08:07:23.366378 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-controller-init-dockercfg-7jtsz" Jan 27 08:07:23 crc kubenswrapper[4787]: I0127 08:07:23.369037 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lftt5\" (UniqueName: \"kubernetes.io/projected/86e4bb51-0d54-49f8-b158-8338d845d92e-kube-api-access-lftt5\") pod \"openstack-operator-controller-init-59cc4f5964-ch4bq\" (UID: \"86e4bb51-0d54-49f8-b158-8338d845d92e\") " pod="openstack-operators/openstack-operator-controller-init-59cc4f5964-ch4bq" Jan 27 08:07:23 crc kubenswrapper[4787]: I0127 08:07:23.470985 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lftt5\" (UniqueName: \"kubernetes.io/projected/86e4bb51-0d54-49f8-b158-8338d845d92e-kube-api-access-lftt5\") pod \"openstack-operator-controller-init-59cc4f5964-ch4bq\" (UID: \"86e4bb51-0d54-49f8-b158-8338d845d92e\") " pod="openstack-operators/openstack-operator-controller-init-59cc4f5964-ch4bq" Jan 27 08:07:23 crc kubenswrapper[4787]: I0127 08:07:23.492044 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lftt5\" (UniqueName: \"kubernetes.io/projected/86e4bb51-0d54-49f8-b158-8338d845d92e-kube-api-access-lftt5\") pod \"openstack-operator-controller-init-59cc4f5964-ch4bq\" (UID: \"86e4bb51-0d54-49f8-b158-8338d845d92e\") " pod="openstack-operators/openstack-operator-controller-init-59cc4f5964-ch4bq" Jan 27 08:07:23 crc kubenswrapper[4787]: I0127 08:07:23.687503 4787 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-controller-init-59cc4f5964-ch4bq" Jan 27 08:07:24 crc kubenswrapper[4787]: I0127 08:07:24.192260 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-init-59cc4f5964-ch4bq"] Jan 27 08:07:25 crc kubenswrapper[4787]: I0127 08:07:25.147895 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-init-59cc4f5964-ch4bq" event={"ID":"86e4bb51-0d54-49f8-b158-8338d845d92e","Type":"ContainerStarted","Data":"0a86cb2179439de11cc5dfa2c7d576a7b85ad83b8b80b1dd922ae9e6f93584cf"} Jan 27 08:07:29 crc kubenswrapper[4787]: I0127 08:07:29.178925 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-init-59cc4f5964-ch4bq" event={"ID":"86e4bb51-0d54-49f8-b158-8338d845d92e","Type":"ContainerStarted","Data":"5cbc51ae998c8ec5e3d568f273050ca82bb17cea0dd97580f6bd144d80fe9b5f"} Jan 27 08:07:29 crc kubenswrapper[4787]: I0127 08:07:29.179325 4787 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-controller-init-59cc4f5964-ch4bq" Jan 27 08:07:29 crc kubenswrapper[4787]: I0127 08:07:29.213726 4787 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-controller-init-59cc4f5964-ch4bq" podStartSLOduration=2.115656354 podStartE2EDuration="6.213700132s" podCreationTimestamp="2026-01-27 08:07:23 +0000 UTC" firstStartedPulling="2026-01-27 08:07:24.217443135 +0000 UTC m=+949.869798617" lastFinishedPulling="2026-01-27 08:07:28.315486903 +0000 UTC m=+953.967842395" observedRunningTime="2026-01-27 08:07:29.207870834 +0000 UTC m=+954.860226336" watchObservedRunningTime="2026-01-27 08:07:29.213700132 +0000 UTC m=+954.866055624" Jan 27 08:07:33 crc kubenswrapper[4787]: I0127 08:07:33.691914 4787 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-controller-init-59cc4f5964-ch4bq" Jan 27 08:07:52 crc kubenswrapper[4787]: I0127 08:07:52.102522 4787 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/barbican-operator-controller-manager-65ff799cfd-d5r5z"] Jan 27 08:07:52 crc kubenswrapper[4787]: I0127 08:07:52.103976 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/barbican-operator-controller-manager-65ff799cfd-d5r5z" Jan 27 08:07:52 crc kubenswrapper[4787]: I0127 08:07:52.106343 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"barbican-operator-controller-manager-dockercfg-fqkfl" Jan 27 08:07:52 crc kubenswrapper[4787]: I0127 08:07:52.115204 4787 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/cinder-operator-controller-manager-655bf9cfbb-6tnpg"] Jan 27 08:07:52 crc kubenswrapper[4787]: I0127 08:07:52.116276 4787 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/cinder-operator-controller-manager-655bf9cfbb-6tnpg" Jan 27 08:07:52 crc kubenswrapper[4787]: I0127 08:07:52.133300 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"cinder-operator-controller-manager-dockercfg-6tq26" Jan 27 08:07:52 crc kubenswrapper[4787]: I0127 08:07:52.139113 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/cinder-operator-controller-manager-655bf9cfbb-6tnpg"] Jan 27 08:07:52 crc kubenswrapper[4787]: I0127 08:07:52.154392 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/barbican-operator-controller-manager-65ff799cfd-d5r5z"] Jan 27 08:07:52 crc kubenswrapper[4787]: I0127 08:07:52.175639 4787 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/designate-operator-controller-manager-77554cdc5c-627g6"] Jan 27 08:07:52 crc kubenswrapper[4787]: I0127 08:07:52.176707 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/designate-operator-controller-manager-77554cdc5c-627g6" Jan 27 08:07:52 crc kubenswrapper[4787]: I0127 08:07:52.182472 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"designate-operator-controller-manager-dockercfg-88lqz" Jan 27 08:07:52 crc kubenswrapper[4787]: I0127 08:07:52.196284 4787 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/glance-operator-controller-manager-67dd55ff59-kk6mj"] Jan 27 08:07:52 crc kubenswrapper[4787]: I0127 08:07:52.197614 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/glance-operator-controller-manager-67dd55ff59-kk6mj" Jan 27 08:07:52 crc kubenswrapper[4787]: I0127 08:07:52.202907 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"glance-operator-controller-manager-dockercfg-2w96c" Jan 27 08:07:52 crc kubenswrapper[4787]: I0127 08:07:52.213194 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/designate-operator-controller-manager-77554cdc5c-627g6"] Jan 27 08:07:52 crc kubenswrapper[4787]: I0127 08:07:52.219386 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/glance-operator-controller-manager-67dd55ff59-kk6mj"] Jan 27 08:07:52 crc kubenswrapper[4787]: I0127 08:07:52.222789 4787 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/heat-operator-controller-manager-575ffb885b-57zhq"] Jan 27 08:07:52 crc kubenswrapper[4787]: I0127 08:07:52.224114 4787 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/heat-operator-controller-manager-575ffb885b-57zhq" Jan 27 08:07:52 crc kubenswrapper[4787]: I0127 08:07:52.228631 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"heat-operator-controller-manager-dockercfg-tj4rr" Jan 27 08:07:52 crc kubenswrapper[4787]: I0127 08:07:52.231164 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bfgz4\" (UniqueName: \"kubernetes.io/projected/85bafad5-30f3-4931-bd2a-0d45e2b0f844-kube-api-access-bfgz4\") pod \"cinder-operator-controller-manager-655bf9cfbb-6tnpg\" (UID: \"85bafad5-30f3-4931-bd2a-0d45e2b0f844\") " pod="openstack-operators/cinder-operator-controller-manager-655bf9cfbb-6tnpg" Jan 27 08:07:52 crc kubenswrapper[4787]: I0127 08:07:52.231222 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bk7bm\" (UniqueName: \"kubernetes.io/projected/4b5515cb-f539-4a9b-8c46-36c5c62c5c93-kube-api-access-bk7bm\") pod \"barbican-operator-controller-manager-65ff799cfd-d5r5z\" (UID: \"4b5515cb-f539-4a9b-8c46-36c5c62c5c93\") " pod="openstack-operators/barbican-operator-controller-manager-65ff799cfd-d5r5z" Jan 27 08:07:52 crc kubenswrapper[4787]: I0127 08:07:52.231748 4787 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/horizon-operator-controller-manager-77d5c5b54f-vkmnm"] Jan 27 08:07:52 crc kubenswrapper[4787]: I0127 08:07:52.233124 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/horizon-operator-controller-manager-77d5c5b54f-vkmnm" Jan 27 08:07:52 crc kubenswrapper[4787]: I0127 08:07:52.235194 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"horizon-operator-controller-manager-dockercfg-jdq8b" Jan 27 08:07:52 crc kubenswrapper[4787]: I0127 08:07:52.246652 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/heat-operator-controller-manager-575ffb885b-57zhq"] Jan 27 08:07:52 crc kubenswrapper[4787]: I0127 08:07:52.266064 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/horizon-operator-controller-manager-77d5c5b54f-vkmnm"] Jan 27 08:07:52 crc kubenswrapper[4787]: I0127 08:07:52.275857 4787 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/infra-operator-controller-manager-7d75bc88d5-drtqw"] Jan 27 08:07:52 crc kubenswrapper[4787]: I0127 08:07:52.276963 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-7d75bc88d5-drtqw" Jan 27 08:07:52 crc kubenswrapper[4787]: I0127 08:07:52.281027 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-controller-manager-dockercfg-4j2xf" Jan 27 08:07:52 crc kubenswrapper[4787]: I0127 08:07:52.281257 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-webhook-server-cert" Jan 27 08:07:52 crc kubenswrapper[4787]: I0127 08:07:52.297694 4787 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/ironic-operator-controller-manager-768b776ffb-j7klk"] Jan 27 08:07:52 crc kubenswrapper[4787]: I0127 08:07:52.298763 4787 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/ironic-operator-controller-manager-768b776ffb-j7klk" Jan 27 08:07:52 crc kubenswrapper[4787]: I0127 08:07:52.305383 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-controller-manager-7d75bc88d5-drtqw"] Jan 27 08:07:52 crc kubenswrapper[4787]: I0127 08:07:52.309574 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"ironic-operator-controller-manager-dockercfg-qpx2f" Jan 27 08:07:52 crc kubenswrapper[4787]: I0127 08:07:52.310592 4787 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/keystone-operator-controller-manager-55f684fd56-2k8pd"] Jan 27 08:07:52 crc kubenswrapper[4787]: I0127 08:07:52.311563 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-55f684fd56-2k8pd" Jan 27 08:07:52 crc kubenswrapper[4787]: I0127 08:07:52.314311 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"keystone-operator-controller-manager-dockercfg-44fx6" Jan 27 08:07:52 crc kubenswrapper[4787]: I0127 08:07:52.323689 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ironic-operator-controller-manager-768b776ffb-j7klk"] Jan 27 08:07:52 crc kubenswrapper[4787]: I0127 08:07:52.380374 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-55f684fd56-2k8pd"] Jan 27 08:07:52 crc kubenswrapper[4787]: I0127 08:07:52.400077 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jzws7\" (UniqueName: \"kubernetes.io/projected/32019c4d-61e3-4c27-86d4-3a79bb40ce70-kube-api-access-jzws7\") pod \"keystone-operator-controller-manager-55f684fd56-2k8pd\" (UID: \"32019c4d-61e3-4c27-86d4-3a79bb40ce70\") " pod="openstack-operators/keystone-operator-controller-manager-55f684fd56-2k8pd" Jan 27 08:07:52 crc kubenswrapper[4787]: I0127 08:07:52.400140 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5gp8m\" (UniqueName: \"kubernetes.io/projected/dd8dbc24-8fb8-4ae2-96c3-b7112c7d7c65-kube-api-access-5gp8m\") pod \"designate-operator-controller-manager-77554cdc5c-627g6\" (UID: \"dd8dbc24-8fb8-4ae2-96c3-b7112c7d7c65\") " pod="openstack-operators/designate-operator-controller-manager-77554cdc5c-627g6" Jan 27 08:07:52 crc kubenswrapper[4787]: I0127 08:07:52.400207 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jb46d\" (UniqueName: \"kubernetes.io/projected/a21cc0b9-75d2-4ddb-925e-23eeaac2dd35-kube-api-access-jb46d\") pod \"horizon-operator-controller-manager-77d5c5b54f-vkmnm\" (UID: \"a21cc0b9-75d2-4ddb-925e-23eeaac2dd35\") " pod="openstack-operators/horizon-operator-controller-manager-77d5c5b54f-vkmnm" Jan 27 08:07:52 crc kubenswrapper[4787]: I0127 08:07:52.400260 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wsqjb\" (UniqueName: \"kubernetes.io/projected/be4be3e5-1589-4e84-9d35-f104bc3d5ad4-kube-api-access-wsqjb\") pod \"glance-operator-controller-manager-67dd55ff59-kk6mj\" (UID: \"be4be3e5-1589-4e84-9d35-f104bc3d5ad4\") " pod="openstack-operators/glance-operator-controller-manager-67dd55ff59-kk6mj" Jan 27 08:07:52 crc kubenswrapper[4787]: I0127 08:07:52.400279 4787 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6z7sm\" (UniqueName: \"kubernetes.io/projected/ff52e5a2-2aab-451a-8869-72c1a506940a-kube-api-access-6z7sm\") pod \"ironic-operator-controller-manager-768b776ffb-j7klk\" (UID: \"ff52e5a2-2aab-451a-8869-72c1a506940a\") " pod="openstack-operators/ironic-operator-controller-manager-768b776ffb-j7klk" Jan 27 08:07:52 crc kubenswrapper[4787]: I0127 08:07:52.400299 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-csw77\" (UniqueName: \"kubernetes.io/projected/c634491f-6069-472d-bff7-d8903d0afa1d-kube-api-access-csw77\") pod \"infra-operator-controller-manager-7d75bc88d5-drtqw\" (UID: \"c634491f-6069-472d-bff7-d8903d0afa1d\") " pod="openstack-operators/infra-operator-controller-manager-7d75bc88d5-drtqw" Jan 27 08:07:52 crc kubenswrapper[4787]: I0127 08:07:52.400342 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bfgz4\" (UniqueName: \"kubernetes.io/projected/85bafad5-30f3-4931-bd2a-0d45e2b0f844-kube-api-access-bfgz4\") pod \"cinder-operator-controller-manager-655bf9cfbb-6tnpg\" (UID: \"85bafad5-30f3-4931-bd2a-0d45e2b0f844\") " pod="openstack-operators/cinder-operator-controller-manager-655bf9cfbb-6tnpg" Jan 27 08:07:52 crc kubenswrapper[4787]: I0127 08:07:52.400371 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qv4mh\" (UniqueName: \"kubernetes.io/projected/f621b9f3-ead9-4fab-b33f-f9d6179e8f3f-kube-api-access-qv4mh\") pod \"heat-operator-controller-manager-575ffb885b-57zhq\" (UID: \"f621b9f3-ead9-4fab-b33f-f9d6179e8f3f\") " pod="openstack-operators/heat-operator-controller-manager-575ffb885b-57zhq" Jan 27 08:07:52 crc kubenswrapper[4787]: I0127 08:07:52.400399 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bk7bm\" (UniqueName: \"kubernetes.io/projected/4b5515cb-f539-4a9b-8c46-36c5c62c5c93-kube-api-access-bk7bm\") pod \"barbican-operator-controller-manager-65ff799cfd-d5r5z\" (UID: \"4b5515cb-f539-4a9b-8c46-36c5c62c5c93\") " pod="openstack-operators/barbican-operator-controller-manager-65ff799cfd-d5r5z" Jan 27 08:07:52 crc kubenswrapper[4787]: I0127 08:07:52.400436 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/c634491f-6069-472d-bff7-d8903d0afa1d-cert\") pod \"infra-operator-controller-manager-7d75bc88d5-drtqw\" (UID: \"c634491f-6069-472d-bff7-d8903d0afa1d\") " pod="openstack-operators/infra-operator-controller-manager-7d75bc88d5-drtqw" Jan 27 08:07:52 crc kubenswrapper[4787]: I0127 08:07:52.428897 4787 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/manila-operator-controller-manager-849fcfbb6b-tbr5m"] Jan 27 08:07:52 crc kubenswrapper[4787]: I0127 08:07:52.430581 4787 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/manila-operator-controller-manager-849fcfbb6b-tbr5m" Jan 27 08:07:52 crc kubenswrapper[4787]: I0127 08:07:52.435687 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"manila-operator-controller-manager-dockercfg-lc2ft" Jan 27 08:07:52 crc kubenswrapper[4787]: I0127 08:07:52.437441 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bk7bm\" (UniqueName: \"kubernetes.io/projected/4b5515cb-f539-4a9b-8c46-36c5c62c5c93-kube-api-access-bk7bm\") pod \"barbican-operator-controller-manager-65ff799cfd-d5r5z\" (UID: \"4b5515cb-f539-4a9b-8c46-36c5c62c5c93\") " pod="openstack-operators/barbican-operator-controller-manager-65ff799cfd-d5r5z" Jan 27 08:07:52 crc kubenswrapper[4787]: I0127 08:07:52.441019 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bfgz4\" (UniqueName: \"kubernetes.io/projected/85bafad5-30f3-4931-bd2a-0d45e2b0f844-kube-api-access-bfgz4\") pod \"cinder-operator-controller-manager-655bf9cfbb-6tnpg\" (UID: \"85bafad5-30f3-4931-bd2a-0d45e2b0f844\") " pod="openstack-operators/cinder-operator-controller-manager-655bf9cfbb-6tnpg" Jan 27 08:07:52 crc kubenswrapper[4787]: I0127 08:07:52.444129 4787 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-6b9fb5fdcb-88gn4"] Jan 27 08:07:52 crc kubenswrapper[4787]: I0127 08:07:52.445297 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-6b9fb5fdcb-88gn4" Jan 27 08:07:52 crc kubenswrapper[4787]: I0127 08:07:52.448810 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/cinder-operator-controller-manager-655bf9cfbb-6tnpg" Jan 27 08:07:52 crc kubenswrapper[4787]: I0127 08:07:52.455212 4787 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/neutron-operator-controller-manager-7ffd8d76d4-vk5vt"] Jan 27 08:07:52 crc kubenswrapper[4787]: I0127 08:07:52.456716 4787 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/neutron-operator-controller-manager-7ffd8d76d4-vk5vt" Jan 27 08:07:52 crc kubenswrapper[4787]: I0127 08:07:52.460128 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-6b9fb5fdcb-88gn4"] Jan 27 08:07:52 crc kubenswrapper[4787]: I0127 08:07:52.474714 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/manila-operator-controller-manager-849fcfbb6b-tbr5m"] Jan 27 08:07:52 crc kubenswrapper[4787]: I0127 08:07:52.485537 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/neutron-operator-controller-manager-7ffd8d76d4-vk5vt"] Jan 27 08:07:52 crc kubenswrapper[4787]: I0127 08:07:52.489945 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"neutron-operator-controller-manager-dockercfg-kmxgn" Jan 27 08:07:52 crc kubenswrapper[4787]: I0127 08:07:52.490270 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"mariadb-operator-controller-manager-dockercfg-cdz4s" Jan 27 08:07:52 crc kubenswrapper[4787]: I0127 08:07:52.499891 4787 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/nova-operator-controller-manager-57d6b69d8b-s8kvs"] Jan 27 08:07:52 crc kubenswrapper[4787]: I0127 08:07:52.501197 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/nova-operator-controller-manager-57d6b69d8b-s8kvs" Jan 27 08:07:52 crc kubenswrapper[4787]: I0127 08:07:52.507528 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jzws7\" (UniqueName: \"kubernetes.io/projected/32019c4d-61e3-4c27-86d4-3a79bb40ce70-kube-api-access-jzws7\") pod \"keystone-operator-controller-manager-55f684fd56-2k8pd\" (UID: \"32019c4d-61e3-4c27-86d4-3a79bb40ce70\") " pod="openstack-operators/keystone-operator-controller-manager-55f684fd56-2k8pd" Jan 27 08:07:52 crc kubenswrapper[4787]: I0127 08:07:52.507583 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5gp8m\" (UniqueName: \"kubernetes.io/projected/dd8dbc24-8fb8-4ae2-96c3-b7112c7d7c65-kube-api-access-5gp8m\") pod \"designate-operator-controller-manager-77554cdc5c-627g6\" (UID: \"dd8dbc24-8fb8-4ae2-96c3-b7112c7d7c65\") " pod="openstack-operators/designate-operator-controller-manager-77554cdc5c-627g6" Jan 27 08:07:52 crc kubenswrapper[4787]: I0127 08:07:52.508600 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"nova-operator-controller-manager-dockercfg-d9cbf" Jan 27 08:07:52 crc kubenswrapper[4787]: I0127 08:07:52.508746 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jb46d\" (UniqueName: \"kubernetes.io/projected/a21cc0b9-75d2-4ddb-925e-23eeaac2dd35-kube-api-access-jb46d\") pod \"horizon-operator-controller-manager-77d5c5b54f-vkmnm\" (UID: \"a21cc0b9-75d2-4ddb-925e-23eeaac2dd35\") " pod="openstack-operators/horizon-operator-controller-manager-77d5c5b54f-vkmnm" Jan 27 08:07:52 crc kubenswrapper[4787]: I0127 08:07:52.508846 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wsqjb\" (UniqueName: \"kubernetes.io/projected/be4be3e5-1589-4e84-9d35-f104bc3d5ad4-kube-api-access-wsqjb\") pod \"glance-operator-controller-manager-67dd55ff59-kk6mj\" (UID: \"be4be3e5-1589-4e84-9d35-f104bc3d5ad4\") " 
pod="openstack-operators/glance-operator-controller-manager-67dd55ff59-kk6mj" Jan 27 08:07:52 crc kubenswrapper[4787]: I0127 08:07:52.508888 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6z7sm\" (UniqueName: \"kubernetes.io/projected/ff52e5a2-2aab-451a-8869-72c1a506940a-kube-api-access-6z7sm\") pod \"ironic-operator-controller-manager-768b776ffb-j7klk\" (UID: \"ff52e5a2-2aab-451a-8869-72c1a506940a\") " pod="openstack-operators/ironic-operator-controller-manager-768b776ffb-j7klk" Jan 27 08:07:52 crc kubenswrapper[4787]: I0127 08:07:52.508916 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-csw77\" (UniqueName: \"kubernetes.io/projected/c634491f-6069-472d-bff7-d8903d0afa1d-kube-api-access-csw77\") pod \"infra-operator-controller-manager-7d75bc88d5-drtqw\" (UID: \"c634491f-6069-472d-bff7-d8903d0afa1d\") " pod="openstack-operators/infra-operator-controller-manager-7d75bc88d5-drtqw" Jan 27 08:07:52 crc kubenswrapper[4787]: I0127 08:07:52.508961 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qv4mh\" (UniqueName: \"kubernetes.io/projected/f621b9f3-ead9-4fab-b33f-f9d6179e8f3f-kube-api-access-qv4mh\") pod \"heat-operator-controller-manager-575ffb885b-57zhq\" (UID: \"f621b9f3-ead9-4fab-b33f-f9d6179e8f3f\") " pod="openstack-operators/heat-operator-controller-manager-575ffb885b-57zhq" Jan 27 08:07:52 crc kubenswrapper[4787]: I0127 08:07:52.509020 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/c634491f-6069-472d-bff7-d8903d0afa1d-cert\") pod \"infra-operator-controller-manager-7d75bc88d5-drtqw\" (UID: \"c634491f-6069-472d-bff7-d8903d0afa1d\") " pod="openstack-operators/infra-operator-controller-manager-7d75bc88d5-drtqw" Jan 27 08:07:52 crc kubenswrapper[4787]: E0127 08:07:52.509162 4787 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Jan 27 08:07:52 crc kubenswrapper[4787]: E0127 08:07:52.509237 4787 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/c634491f-6069-472d-bff7-d8903d0afa1d-cert podName:c634491f-6069-472d-bff7-d8903d0afa1d nodeName:}" failed. No retries permitted until 2026-01-27 08:07:53.009210412 +0000 UTC m=+978.661565904 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/c634491f-6069-472d-bff7-d8903d0afa1d-cert") pod "infra-operator-controller-manager-7d75bc88d5-drtqw" (UID: "c634491f-6069-472d-bff7-d8903d0afa1d") : secret "infra-operator-webhook-server-cert" not found Jan 27 08:07:52 crc kubenswrapper[4787]: I0127 08:07:52.523674 4787 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/octavia-operator-controller-manager-7875d7675-x4nz5"] Jan 27 08:07:52 crc kubenswrapper[4787]: I0127 08:07:52.524719 4787 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/octavia-operator-controller-manager-7875d7675-x4nz5" Jan 27 08:07:52 crc kubenswrapper[4787]: I0127 08:07:52.555537 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"octavia-operator-controller-manager-dockercfg-z9xlm" Jan 27 08:07:52 crc kubenswrapper[4787]: I0127 08:07:52.568201 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/nova-operator-controller-manager-57d6b69d8b-s8kvs"] Jan 27 08:07:52 crc kubenswrapper[4787]: I0127 08:07:52.573806 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wsqjb\" (UniqueName: \"kubernetes.io/projected/be4be3e5-1589-4e84-9d35-f104bc3d5ad4-kube-api-access-wsqjb\") pod \"glance-operator-controller-manager-67dd55ff59-kk6mj\" (UID: \"be4be3e5-1589-4e84-9d35-f104bc3d5ad4\") " pod="openstack-operators/glance-operator-controller-manager-67dd55ff59-kk6mj" Jan 27 08:07:52 crc kubenswrapper[4787]: I0127 08:07:52.574254 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jzws7\" (UniqueName: \"kubernetes.io/projected/32019c4d-61e3-4c27-86d4-3a79bb40ce70-kube-api-access-jzws7\") pod \"keystone-operator-controller-manager-55f684fd56-2k8pd\" (UID: \"32019c4d-61e3-4c27-86d4-3a79bb40ce70\") " pod="openstack-operators/keystone-operator-controller-manager-55f684fd56-2k8pd" Jan 27 08:07:52 crc kubenswrapper[4787]: I0127 08:07:52.585325 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qv4mh\" (UniqueName: \"kubernetes.io/projected/f621b9f3-ead9-4fab-b33f-f9d6179e8f3f-kube-api-access-qv4mh\") pod \"heat-operator-controller-manager-575ffb885b-57zhq\" (UID: \"f621b9f3-ead9-4fab-b33f-f9d6179e8f3f\") " pod="openstack-operators/heat-operator-controller-manager-575ffb885b-57zhq" Jan 27 08:07:52 crc kubenswrapper[4787]: I0127 08:07:52.585329 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6z7sm\" (UniqueName: \"kubernetes.io/projected/ff52e5a2-2aab-451a-8869-72c1a506940a-kube-api-access-6z7sm\") pod \"ironic-operator-controller-manager-768b776ffb-j7klk\" (UID: \"ff52e5a2-2aab-451a-8869-72c1a506940a\") " pod="openstack-operators/ironic-operator-controller-manager-768b776ffb-j7klk" Jan 27 08:07:52 crc kubenswrapper[4787]: I0127 08:07:52.585994 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5gp8m\" (UniqueName: \"kubernetes.io/projected/dd8dbc24-8fb8-4ae2-96c3-b7112c7d7c65-kube-api-access-5gp8m\") pod \"designate-operator-controller-manager-77554cdc5c-627g6\" (UID: \"dd8dbc24-8fb8-4ae2-96c3-b7112c7d7c65\") " pod="openstack-operators/designate-operator-controller-manager-77554cdc5c-627g6" Jan 27 08:07:52 crc kubenswrapper[4787]: I0127 08:07:52.587291 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jb46d\" (UniqueName: \"kubernetes.io/projected/a21cc0b9-75d2-4ddb-925e-23eeaac2dd35-kube-api-access-jb46d\") pod \"horizon-operator-controller-manager-77d5c5b54f-vkmnm\" (UID: \"a21cc0b9-75d2-4ddb-925e-23eeaac2dd35\") " pod="openstack-operators/horizon-operator-controller-manager-77d5c5b54f-vkmnm" Jan 27 08:07:52 crc kubenswrapper[4787]: I0127 08:07:52.594083 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-csw77\" (UniqueName: \"kubernetes.io/projected/c634491f-6069-472d-bff7-d8903d0afa1d-kube-api-access-csw77\") pod \"infra-operator-controller-manager-7d75bc88d5-drtqw\" (UID: 
\"c634491f-6069-472d-bff7-d8903d0afa1d\") " pod="openstack-operators/infra-operator-controller-manager-7d75bc88d5-drtqw" Jan 27 08:07:52 crc kubenswrapper[4787]: I0127 08:07:52.597972 4787 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-6b68b8b854cf9xq"] Jan 27 08:07:52 crc kubenswrapper[4787]: I0127 08:07:52.599047 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-baremetal-operator-controller-manager-6b68b8b854cf9xq" Jan 27 08:07:52 crc kubenswrapper[4787]: I0127 08:07:52.606497 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-baremetal-operator-webhook-server-cert" Jan 27 08:07:52 crc kubenswrapper[4787]: I0127 08:07:52.606520 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-baremetal-operator-controller-manager-dockercfg-wrq8q" Jan 27 08:07:52 crc kubenswrapper[4787]: I0127 08:07:52.609985 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z8r9f\" (UniqueName: \"kubernetes.io/projected/be114c8e-3aa0-41b4-9954-d07c800d3cfc-kube-api-access-z8r9f\") pod \"neutron-operator-controller-manager-7ffd8d76d4-vk5vt\" (UID: \"be114c8e-3aa0-41b4-9954-d07c800d3cfc\") " pod="openstack-operators/neutron-operator-controller-manager-7ffd8d76d4-vk5vt" Jan 27 08:07:52 crc kubenswrapper[4787]: I0127 08:07:52.610111 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2vdjb\" (UniqueName: \"kubernetes.io/projected/3ad17a28-2cb1-4455-8d38-58919a287ae6-kube-api-access-2vdjb\") pod \"nova-operator-controller-manager-57d6b69d8b-s8kvs\" (UID: \"3ad17a28-2cb1-4455-8d38-58919a287ae6\") " pod="openstack-operators/nova-operator-controller-manager-57d6b69d8b-s8kvs" Jan 27 08:07:52 crc kubenswrapper[4787]: I0127 08:07:52.610150 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zmbfj\" (UniqueName: \"kubernetes.io/projected/e59e94f5-9033-488d-a186-476cb6cbb3f0-kube-api-access-zmbfj\") pod \"mariadb-operator-controller-manager-6b9fb5fdcb-88gn4\" (UID: \"e59e94f5-9033-488d-a186-476cb6cbb3f0\") " pod="openstack-operators/mariadb-operator-controller-manager-6b9fb5fdcb-88gn4" Jan 27 08:07:52 crc kubenswrapper[4787]: I0127 08:07:52.610194 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cx5xl\" (UniqueName: \"kubernetes.io/projected/59efc0ff-5727-48f9-91b7-36533ba5f94a-kube-api-access-cx5xl\") pod \"manila-operator-controller-manager-849fcfbb6b-tbr5m\" (UID: \"59efc0ff-5727-48f9-91b7-36533ba5f94a\") " pod="openstack-operators/manila-operator-controller-manager-849fcfbb6b-tbr5m" Jan 27 08:07:52 crc kubenswrapper[4787]: I0127 08:07:52.654340 4787 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/ovn-operator-controller-manager-6f75f45d54-l9mc7"] Jan 27 08:07:52 crc kubenswrapper[4787]: I0127 08:07:52.655729 4787 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/ovn-operator-controller-manager-6f75f45d54-l9mc7" Jan 27 08:07:52 crc kubenswrapper[4787]: I0127 08:07:52.658441 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"ovn-operator-controller-manager-dockercfg-sgrt9" Jan 27 08:07:52 crc kubenswrapper[4787]: I0127 08:07:52.680622 4787 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/placement-operator-controller-manager-79d5ccc684-qhnz4"] Jan 27 08:07:52 crc kubenswrapper[4787]: I0127 08:07:52.681106 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ironic-operator-controller-manager-768b776ffb-j7klk" Jan 27 08:07:52 crc kubenswrapper[4787]: I0127 08:07:52.681813 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/placement-operator-controller-manager-79d5ccc684-qhnz4" Jan 27 08:07:52 crc kubenswrapper[4787]: I0127 08:07:52.684390 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"placement-operator-controller-manager-dockercfg-jqrcp" Jan 27 08:07:52 crc kubenswrapper[4787]: I0127 08:07:52.689810 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ovn-operator-controller-manager-6f75f45d54-l9mc7"] Jan 27 08:07:52 crc kubenswrapper[4787]: I0127 08:07:52.711025 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-55f684fd56-2k8pd" Jan 27 08:07:52 crc kubenswrapper[4787]: I0127 08:07:52.725844 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/barbican-operator-controller-manager-65ff799cfd-d5r5z" Jan 27 08:07:52 crc kubenswrapper[4787]: I0127 08:07:52.726437 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2vdjb\" (UniqueName: \"kubernetes.io/projected/3ad17a28-2cb1-4455-8d38-58919a287ae6-kube-api-access-2vdjb\") pod \"nova-operator-controller-manager-57d6b69d8b-s8kvs\" (UID: \"3ad17a28-2cb1-4455-8d38-58919a287ae6\") " pod="openstack-operators/nova-operator-controller-manager-57d6b69d8b-s8kvs" Jan 27 08:07:52 crc kubenswrapper[4787]: I0127 08:07:52.727007 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zmbfj\" (UniqueName: \"kubernetes.io/projected/e59e94f5-9033-488d-a186-476cb6cbb3f0-kube-api-access-zmbfj\") pod \"mariadb-operator-controller-manager-6b9fb5fdcb-88gn4\" (UID: \"e59e94f5-9033-488d-a186-476cb6cbb3f0\") " pod="openstack-operators/mariadb-operator-controller-manager-6b9fb5fdcb-88gn4" Jan 27 08:07:52 crc kubenswrapper[4787]: I0127 08:07:52.727054 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fbvs8\" (UniqueName: \"kubernetes.io/projected/601fe40b-5553-4225-b5bd-214428d6fa68-kube-api-access-fbvs8\") pod \"octavia-operator-controller-manager-7875d7675-x4nz5\" (UID: \"601fe40b-5553-4225-b5bd-214428d6fa68\") " pod="openstack-operators/octavia-operator-controller-manager-7875d7675-x4nz5" Jan 27 08:07:52 crc kubenswrapper[4787]: I0127 08:07:52.727086 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cx5xl\" (UniqueName: \"kubernetes.io/projected/59efc0ff-5727-48f9-91b7-36533ba5f94a-kube-api-access-cx5xl\") pod \"manila-operator-controller-manager-849fcfbb6b-tbr5m\" (UID: \"59efc0ff-5727-48f9-91b7-36533ba5f94a\") " 
pod="openstack-operators/manila-operator-controller-manager-849fcfbb6b-tbr5m" Jan 27 08:07:52 crc kubenswrapper[4787]: I0127 08:07:52.727135 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z8r9f\" (UniqueName: \"kubernetes.io/projected/be114c8e-3aa0-41b4-9954-d07c800d3cfc-kube-api-access-z8r9f\") pod \"neutron-operator-controller-manager-7ffd8d76d4-vk5vt\" (UID: \"be114c8e-3aa0-41b4-9954-d07c800d3cfc\") " pod="openstack-operators/neutron-operator-controller-manager-7ffd8d76d4-vk5vt" Jan 27 08:07:52 crc kubenswrapper[4787]: I0127 08:07:52.727166 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fz9jt\" (UniqueName: \"kubernetes.io/projected/116b0532-a8d5-47ec-8e12-e7ef482094d6-kube-api-access-fz9jt\") pod \"openstack-baremetal-operator-controller-manager-6b68b8b854cf9xq\" (UID: \"116b0532-a8d5-47ec-8e12-e7ef482094d6\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-6b68b8b854cf9xq" Jan 27 08:07:52 crc kubenswrapper[4787]: I0127 08:07:52.727225 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/116b0532-a8d5-47ec-8e12-e7ef482094d6-cert\") pod \"openstack-baremetal-operator-controller-manager-6b68b8b854cf9xq\" (UID: \"116b0532-a8d5-47ec-8e12-e7ef482094d6\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-6b68b8b854cf9xq" Jan 27 08:07:52 crc kubenswrapper[4787]: I0127 08:07:52.728008 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/placement-operator-controller-manager-79d5ccc684-qhnz4"] Jan 27 08:07:52 crc kubenswrapper[4787]: I0127 08:07:52.739685 4787 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/swift-operator-controller-manager-547cbdb99f-pxbjw"] Jan 27 08:07:52 crc kubenswrapper[4787]: I0127 08:07:52.740538 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/swift-operator-controller-manager-547cbdb99f-pxbjw" Jan 27 08:07:52 crc kubenswrapper[4787]: I0127 08:07:52.747524 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/octavia-operator-controller-manager-7875d7675-x4nz5"] Jan 27 08:07:52 crc kubenswrapper[4787]: I0127 08:07:52.749043 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"swift-operator-controller-manager-dockercfg-lvmhv" Jan 27 08:07:52 crc kubenswrapper[4787]: I0127 08:07:52.770658 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-6b68b8b854cf9xq"] Jan 27 08:07:52 crc kubenswrapper[4787]: I0127 08:07:52.770997 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2vdjb\" (UniqueName: \"kubernetes.io/projected/3ad17a28-2cb1-4455-8d38-58919a287ae6-kube-api-access-2vdjb\") pod \"nova-operator-controller-manager-57d6b69d8b-s8kvs\" (UID: \"3ad17a28-2cb1-4455-8d38-58919a287ae6\") " pod="openstack-operators/nova-operator-controller-manager-57d6b69d8b-s8kvs" Jan 27 08:07:52 crc kubenswrapper[4787]: I0127 08:07:52.773988 4787 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/nova-operator-controller-manager-57d6b69d8b-s8kvs" Jan 27 08:07:52 crc kubenswrapper[4787]: I0127 08:07:52.782846 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/swift-operator-controller-manager-547cbdb99f-pxbjw"] Jan 27 08:07:52 crc kubenswrapper[4787]: I0127 08:07:52.784440 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z8r9f\" (UniqueName: \"kubernetes.io/projected/be114c8e-3aa0-41b4-9954-d07c800d3cfc-kube-api-access-z8r9f\") pod \"neutron-operator-controller-manager-7ffd8d76d4-vk5vt\" (UID: \"be114c8e-3aa0-41b4-9954-d07c800d3cfc\") " pod="openstack-operators/neutron-operator-controller-manager-7ffd8d76d4-vk5vt" Jan 27 08:07:52 crc kubenswrapper[4787]: I0127 08:07:52.794048 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/designate-operator-controller-manager-77554cdc5c-627g6" Jan 27 08:07:52 crc kubenswrapper[4787]: I0127 08:07:52.805402 4787 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-799bc87c89-cqpjk"] Jan 27 08:07:52 crc kubenswrapper[4787]: I0127 08:07:52.808310 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zmbfj\" (UniqueName: \"kubernetes.io/projected/e59e94f5-9033-488d-a186-476cb6cbb3f0-kube-api-access-zmbfj\") pod \"mariadb-operator-controller-manager-6b9fb5fdcb-88gn4\" (UID: \"e59e94f5-9033-488d-a186-476cb6cbb3f0\") " pod="openstack-operators/mariadb-operator-controller-manager-6b9fb5fdcb-88gn4" Jan 27 08:07:52 crc kubenswrapper[4787]: I0127 08:07:52.808334 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/telemetry-operator-controller-manager-799bc87c89-cqpjk" Jan 27 08:07:52 crc kubenswrapper[4787]: I0127 08:07:52.815309 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cx5xl\" (UniqueName: \"kubernetes.io/projected/59efc0ff-5727-48f9-91b7-36533ba5f94a-kube-api-access-cx5xl\") pod \"manila-operator-controller-manager-849fcfbb6b-tbr5m\" (UID: \"59efc0ff-5727-48f9-91b7-36533ba5f94a\") " pod="openstack-operators/manila-operator-controller-manager-849fcfbb6b-tbr5m" Jan 27 08:07:52 crc kubenswrapper[4787]: I0127 08:07:52.816173 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"telemetry-operator-controller-manager-dockercfg-7jhlh" Jan 27 08:07:52 crc kubenswrapper[4787]: I0127 08:07:52.828359 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fqndc\" (UniqueName: \"kubernetes.io/projected/a552e9e7-f9fa-4b71-9ec6-848a2230279f-kube-api-access-fqndc\") pod \"placement-operator-controller-manager-79d5ccc684-qhnz4\" (UID: \"a552e9e7-f9fa-4b71-9ec6-848a2230279f\") " pod="openstack-operators/placement-operator-controller-manager-79d5ccc684-qhnz4" Jan 27 08:07:52 crc kubenswrapper[4787]: I0127 08:07:52.828444 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fz9jt\" (UniqueName: \"kubernetes.io/projected/116b0532-a8d5-47ec-8e12-e7ef482094d6-kube-api-access-fz9jt\") pod \"openstack-baremetal-operator-controller-manager-6b68b8b854cf9xq\" (UID: \"116b0532-a8d5-47ec-8e12-e7ef482094d6\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-6b68b8b854cf9xq" Jan 27 08:07:52 crc kubenswrapper[4787]: I0127 08:07:52.828501 4787 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/116b0532-a8d5-47ec-8e12-e7ef482094d6-cert\") pod \"openstack-baremetal-operator-controller-manager-6b68b8b854cf9xq\" (UID: \"116b0532-a8d5-47ec-8e12-e7ef482094d6\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-6b68b8b854cf9xq" Jan 27 08:07:52 crc kubenswrapper[4787]: I0127 08:07:52.828542 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-njgtm\" (UniqueName: \"kubernetes.io/projected/7cae616e-6aa8-405f-b10a-2a5346fae5b4-kube-api-access-njgtm\") pod \"ovn-operator-controller-manager-6f75f45d54-l9mc7\" (UID: \"7cae616e-6aa8-405f-b10a-2a5346fae5b4\") " pod="openstack-operators/ovn-operator-controller-manager-6f75f45d54-l9mc7" Jan 27 08:07:52 crc kubenswrapper[4787]: I0127 08:07:52.828577 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fbvs8\" (UniqueName: \"kubernetes.io/projected/601fe40b-5553-4225-b5bd-214428d6fa68-kube-api-access-fbvs8\") pod \"octavia-operator-controller-manager-7875d7675-x4nz5\" (UID: \"601fe40b-5553-4225-b5bd-214428d6fa68\") " pod="openstack-operators/octavia-operator-controller-manager-7875d7675-x4nz5" Jan 27 08:07:52 crc kubenswrapper[4787]: I0127 08:07:52.829047 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/glance-operator-controller-manager-67dd55ff59-kk6mj" Jan 27 08:07:52 crc kubenswrapper[4787]: E0127 08:07:52.829746 4787 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Jan 27 08:07:52 crc kubenswrapper[4787]: E0127 08:07:52.829860 4787 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/116b0532-a8d5-47ec-8e12-e7ef482094d6-cert podName:116b0532-a8d5-47ec-8e12-e7ef482094d6 nodeName:}" failed. No retries permitted until 2026-01-27 08:07:53.329837771 +0000 UTC m=+978.982193263 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/116b0532-a8d5-47ec-8e12-e7ef482094d6-cert") pod "openstack-baremetal-operator-controller-manager-6b68b8b854cf9xq" (UID: "116b0532-a8d5-47ec-8e12-e7ef482094d6") : secret "openstack-baremetal-operator-webhook-server-cert" not found Jan 27 08:07:52 crc kubenswrapper[4787]: I0127 08:07:52.858647 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/heat-operator-controller-manager-575ffb885b-57zhq" Jan 27 08:07:52 crc kubenswrapper[4787]: I0127 08:07:52.869381 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-799bc87c89-cqpjk"] Jan 27 08:07:52 crc kubenswrapper[4787]: I0127 08:07:52.871951 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/manila-operator-controller-manager-849fcfbb6b-tbr5m" Jan 27 08:07:52 crc kubenswrapper[4787]: I0127 08:07:52.872439 4787 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/horizon-operator-controller-manager-77d5c5b54f-vkmnm" Jan 27 08:07:52 crc kubenswrapper[4787]: I0127 08:07:52.875799 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fz9jt\" (UniqueName: \"kubernetes.io/projected/116b0532-a8d5-47ec-8e12-e7ef482094d6-kube-api-access-fz9jt\") pod \"openstack-baremetal-operator-controller-manager-6b68b8b854cf9xq\" (UID: \"116b0532-a8d5-47ec-8e12-e7ef482094d6\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-6b68b8b854cf9xq" Jan 27 08:07:52 crc kubenswrapper[4787]: I0127 08:07:52.892862 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fbvs8\" (UniqueName: \"kubernetes.io/projected/601fe40b-5553-4225-b5bd-214428d6fa68-kube-api-access-fbvs8\") pod \"octavia-operator-controller-manager-7875d7675-x4nz5\" (UID: \"601fe40b-5553-4225-b5bd-214428d6fa68\") " pod="openstack-operators/octavia-operator-controller-manager-7875d7675-x4nz5" Jan 27 08:07:52 crc kubenswrapper[4787]: I0127 08:07:52.929469 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-njgtm\" (UniqueName: \"kubernetes.io/projected/7cae616e-6aa8-405f-b10a-2a5346fae5b4-kube-api-access-njgtm\") pod \"ovn-operator-controller-manager-6f75f45d54-l9mc7\" (UID: \"7cae616e-6aa8-405f-b10a-2a5346fae5b4\") " pod="openstack-operators/ovn-operator-controller-manager-6f75f45d54-l9mc7" Jan 27 08:07:52 crc kubenswrapper[4787]: I0127 08:07:52.929574 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fqndc\" (UniqueName: \"kubernetes.io/projected/a552e9e7-f9fa-4b71-9ec6-848a2230279f-kube-api-access-fqndc\") pod \"placement-operator-controller-manager-79d5ccc684-qhnz4\" (UID: \"a552e9e7-f9fa-4b71-9ec6-848a2230279f\") " pod="openstack-operators/placement-operator-controller-manager-79d5ccc684-qhnz4" Jan 27 08:07:52 crc kubenswrapper[4787]: I0127 08:07:52.929612 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ksbq4\" (UniqueName: \"kubernetes.io/projected/89d7a7e7-443a-486f-b8b7-a024082b9fba-kube-api-access-ksbq4\") pod \"swift-operator-controller-manager-547cbdb99f-pxbjw\" (UID: \"89d7a7e7-443a-486f-b8b7-a024082b9fba\") " pod="openstack-operators/swift-operator-controller-manager-547cbdb99f-pxbjw" Jan 27 08:07:52 crc kubenswrapper[4787]: I0127 08:07:52.929676 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wrjsf\" (UniqueName: \"kubernetes.io/projected/7986c3dd-6d06-4892-9fed-1b396669685b-kube-api-access-wrjsf\") pod \"telemetry-operator-controller-manager-799bc87c89-cqpjk\" (UID: \"7986c3dd-6d06-4892-9fed-1b396669685b\") " pod="openstack-operators/telemetry-operator-controller-manager-799bc87c89-cqpjk" Jan 27 08:07:52 crc kubenswrapper[4787]: I0127 08:07:52.941122 4787 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/test-operator-controller-manager-69797bbcbd-dnxpj"] Jan 27 08:07:52 crc kubenswrapper[4787]: I0127 08:07:52.952848 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-6b9fb5fdcb-88gn4" Jan 27 08:07:52 crc kubenswrapper[4787]: I0127 08:07:52.954198 4787 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/test-operator-controller-manager-69797bbcbd-dnxpj" Jan 27 08:07:52 crc kubenswrapper[4787]: I0127 08:07:52.975298 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"test-operator-controller-manager-dockercfg-678l7" Jan 27 08:07:53 crc kubenswrapper[4787]: I0127 08:07:53.002301 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fqndc\" (UniqueName: \"kubernetes.io/projected/a552e9e7-f9fa-4b71-9ec6-848a2230279f-kube-api-access-fqndc\") pod \"placement-operator-controller-manager-79d5ccc684-qhnz4\" (UID: \"a552e9e7-f9fa-4b71-9ec6-848a2230279f\") " pod="openstack-operators/placement-operator-controller-manager-79d5ccc684-qhnz4" Jan 27 08:07:53 crc kubenswrapper[4787]: I0127 08:07:53.002429 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-njgtm\" (UniqueName: \"kubernetes.io/projected/7cae616e-6aa8-405f-b10a-2a5346fae5b4-kube-api-access-njgtm\") pod \"ovn-operator-controller-manager-6f75f45d54-l9mc7\" (UID: \"7cae616e-6aa8-405f-b10a-2a5346fae5b4\") " pod="openstack-operators/ovn-operator-controller-manager-6f75f45d54-l9mc7" Jan 27 08:07:53 crc kubenswrapper[4787]: I0127 08:07:53.010235 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/neutron-operator-controller-manager-7ffd8d76d4-vk5vt" Jan 27 08:07:53 crc kubenswrapper[4787]: I0127 08:07:53.022414 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/placement-operator-controller-manager-79d5ccc684-qhnz4" Jan 27 08:07:53 crc kubenswrapper[4787]: I0127 08:07:53.024075 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/test-operator-controller-manager-69797bbcbd-dnxpj"] Jan 27 08:07:53 crc kubenswrapper[4787]: I0127 08:07:53.039168 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ksbq4\" (UniqueName: \"kubernetes.io/projected/89d7a7e7-443a-486f-b8b7-a024082b9fba-kube-api-access-ksbq4\") pod \"swift-operator-controller-manager-547cbdb99f-pxbjw\" (UID: \"89d7a7e7-443a-486f-b8b7-a024082b9fba\") " pod="openstack-operators/swift-operator-controller-manager-547cbdb99f-pxbjw" Jan 27 08:07:53 crc kubenswrapper[4787]: I0127 08:07:53.039304 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wrjsf\" (UniqueName: \"kubernetes.io/projected/7986c3dd-6d06-4892-9fed-1b396669685b-kube-api-access-wrjsf\") pod \"telemetry-operator-controller-manager-799bc87c89-cqpjk\" (UID: \"7986c3dd-6d06-4892-9fed-1b396669685b\") " pod="openstack-operators/telemetry-operator-controller-manager-799bc87c89-cqpjk" Jan 27 08:07:53 crc kubenswrapper[4787]: I0127 08:07:53.039360 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/c634491f-6069-472d-bff7-d8903d0afa1d-cert\") pod \"infra-operator-controller-manager-7d75bc88d5-drtqw\" (UID: \"c634491f-6069-472d-bff7-d8903d0afa1d\") " pod="openstack-operators/infra-operator-controller-manager-7d75bc88d5-drtqw" Jan 27 08:07:53 crc kubenswrapper[4787]: E0127 08:07:53.039620 4787 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Jan 27 08:07:53 crc kubenswrapper[4787]: E0127 08:07:53.039714 4787 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/secret/c634491f-6069-472d-bff7-d8903d0afa1d-cert podName:c634491f-6069-472d-bff7-d8903d0afa1d nodeName:}" failed. No retries permitted until 2026-01-27 08:07:54.039686385 +0000 UTC m=+979.692041877 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/c634491f-6069-472d-bff7-d8903d0afa1d-cert") pod "infra-operator-controller-manager-7d75bc88d5-drtqw" (UID: "c634491f-6069-472d-bff7-d8903d0afa1d") : secret "infra-operator-webhook-server-cert" not found Jan 27 08:07:53 crc kubenswrapper[4787]: I0127 08:07:53.068153 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ksbq4\" (UniqueName: \"kubernetes.io/projected/89d7a7e7-443a-486f-b8b7-a024082b9fba-kube-api-access-ksbq4\") pod \"swift-operator-controller-manager-547cbdb99f-pxbjw\" (UID: \"89d7a7e7-443a-486f-b8b7-a024082b9fba\") " pod="openstack-operators/swift-operator-controller-manager-547cbdb99f-pxbjw" Jan 27 08:07:53 crc kubenswrapper[4787]: I0127 08:07:53.071933 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wrjsf\" (UniqueName: \"kubernetes.io/projected/7986c3dd-6d06-4892-9fed-1b396669685b-kube-api-access-wrjsf\") pod \"telemetry-operator-controller-manager-799bc87c89-cqpjk\" (UID: \"7986c3dd-6d06-4892-9fed-1b396669685b\") " pod="openstack-operators/telemetry-operator-controller-manager-799bc87c89-cqpjk" Jan 27 08:07:53 crc kubenswrapper[4787]: I0127 08:07:53.075198 4787 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/watcher-operator-controller-manager-75db85654f-8cmb9"] Jan 27 08:07:53 crc kubenswrapper[4787]: I0127 08:07:53.083827 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/watcher-operator-controller-manager-75db85654f-8cmb9" Jan 27 08:07:53 crc kubenswrapper[4787]: I0127 08:07:53.086081 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/octavia-operator-controller-manager-7875d7675-x4nz5" Jan 27 08:07:53 crc kubenswrapper[4787]: I0127 08:07:53.112450 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"watcher-operator-controller-manager-dockercfg-7jstv" Jan 27 08:07:53 crc kubenswrapper[4787]: I0127 08:07:53.142067 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hkf4m\" (UniqueName: \"kubernetes.io/projected/7ec92388-f799-43cc-9235-6d4b717bc98e-kube-api-access-hkf4m\") pod \"test-operator-controller-manager-69797bbcbd-dnxpj\" (UID: \"7ec92388-f799-43cc-9235-6d4b717bc98e\") " pod="openstack-operators/test-operator-controller-manager-69797bbcbd-dnxpj" Jan 27 08:07:53 crc kubenswrapper[4787]: I0127 08:07:53.142489 4787 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/telemetry-operator-controller-manager-799bc87c89-cqpjk" Jan 27 08:07:53 crc kubenswrapper[4787]: I0127 08:07:53.153050 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/watcher-operator-controller-manager-75db85654f-8cmb9"] Jan 27 08:07:53 crc kubenswrapper[4787]: I0127 08:07:53.182242 4787 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-controller-manager-c595dbb59-xc2xn"] Jan 27 08:07:53 crc kubenswrapper[4787]: I0127 08:07:53.200426 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-manager-c595dbb59-xc2xn"] Jan 27 08:07:53 crc kubenswrapper[4787]: I0127 08:07:53.200491 4787 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-5lxsh"] Jan 27 08:07:53 crc kubenswrapper[4787]: I0127 08:07:53.201839 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-5lxsh" Jan 27 08:07:53 crc kubenswrapper[4787]: I0127 08:07:53.201932 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-controller-manager-c595dbb59-xc2xn" Jan 27 08:07:53 crc kubenswrapper[4787]: I0127 08:07:53.208160 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"rabbitmq-cluster-operator-controller-manager-dockercfg-6z6nd" Jan 27 08:07:53 crc kubenswrapper[4787]: I0127 08:07:53.209866 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"webhook-server-cert" Jan 27 08:07:53 crc kubenswrapper[4787]: I0127 08:07:53.210150 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-5lxsh"] Jan 27 08:07:53 crc kubenswrapper[4787]: I0127 08:07:53.210485 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-controller-manager-dockercfg-qvx9b" Jan 27 08:07:53 crc kubenswrapper[4787]: I0127 08:07:53.212313 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"metrics-server-cert" Jan 27 08:07:53 crc kubenswrapper[4787]: I0127 08:07:53.241048 4787 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/ovn-operator-controller-manager-6f75f45d54-l9mc7" Jan 27 08:07:53 crc kubenswrapper[4787]: I0127 08:07:53.247029 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hkf4m\" (UniqueName: \"kubernetes.io/projected/7ec92388-f799-43cc-9235-6d4b717bc98e-kube-api-access-hkf4m\") pod \"test-operator-controller-manager-69797bbcbd-dnxpj\" (UID: \"7ec92388-f799-43cc-9235-6d4b717bc98e\") " pod="openstack-operators/test-operator-controller-manager-69797bbcbd-dnxpj" Jan 27 08:07:53 crc kubenswrapper[4787]: I0127 08:07:53.247702 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vgs22\" (UniqueName: \"kubernetes.io/projected/292c617a-b64f-4b53-b05e-360769308e43-kube-api-access-vgs22\") pod \"watcher-operator-controller-manager-75db85654f-8cmb9\" (UID: \"292c617a-b64f-4b53-b05e-360769308e43\") " pod="openstack-operators/watcher-operator-controller-manager-75db85654f-8cmb9" Jan 27 08:07:53 crc kubenswrapper[4787]: I0127 08:07:53.296215 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hkf4m\" (UniqueName: \"kubernetes.io/projected/7ec92388-f799-43cc-9235-6d4b717bc98e-kube-api-access-hkf4m\") pod \"test-operator-controller-manager-69797bbcbd-dnxpj\" (UID: \"7ec92388-f799-43cc-9235-6d4b717bc98e\") " pod="openstack-operators/test-operator-controller-manager-69797bbcbd-dnxpj" Jan 27 08:07:53 crc kubenswrapper[4787]: I0127 08:07:53.323017 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-55f684fd56-2k8pd"] Jan 27 08:07:53 crc kubenswrapper[4787]: I0127 08:07:53.352446 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vgs22\" (UniqueName: \"kubernetes.io/projected/292c617a-b64f-4b53-b05e-360769308e43-kube-api-access-vgs22\") pod \"watcher-operator-controller-manager-75db85654f-8cmb9\" (UID: \"292c617a-b64f-4b53-b05e-360769308e43\") " pod="openstack-operators/watcher-operator-controller-manager-75db85654f-8cmb9" Jan 27 08:07:53 crc kubenswrapper[4787]: I0127 08:07:53.352525 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/7a0ecc5c-eebe-457b-9a72-804226878c7f-metrics-certs\") pod \"openstack-operator-controller-manager-c595dbb59-xc2xn\" (UID: \"7a0ecc5c-eebe-457b-9a72-804226878c7f\") " pod="openstack-operators/openstack-operator-controller-manager-c595dbb59-xc2xn" Jan 27 08:07:53 crc kubenswrapper[4787]: I0127 08:07:53.352593 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/116b0532-a8d5-47ec-8e12-e7ef482094d6-cert\") pod \"openstack-baremetal-operator-controller-manager-6b68b8b854cf9xq\" (UID: \"116b0532-a8d5-47ec-8e12-e7ef482094d6\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-6b68b8b854cf9xq" Jan 27 08:07:53 crc kubenswrapper[4787]: I0127 08:07:53.352654 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rvwxw\" (UniqueName: \"kubernetes.io/projected/7a0ecc5c-eebe-457b-9a72-804226878c7f-kube-api-access-rvwxw\") pod \"openstack-operator-controller-manager-c595dbb59-xc2xn\" (UID: \"7a0ecc5c-eebe-457b-9a72-804226878c7f\") " pod="openstack-operators/openstack-operator-controller-manager-c595dbb59-xc2xn" Jan 27 08:07:53 crc 
kubenswrapper[4787]: I0127 08:07:53.352719 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/7a0ecc5c-eebe-457b-9a72-804226878c7f-webhook-certs\") pod \"openstack-operator-controller-manager-c595dbb59-xc2xn\" (UID: \"7a0ecc5c-eebe-457b-9a72-804226878c7f\") " pod="openstack-operators/openstack-operator-controller-manager-c595dbb59-xc2xn" Jan 27 08:07:53 crc kubenswrapper[4787]: I0127 08:07:53.352739 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g7r9d\" (UniqueName: \"kubernetes.io/projected/2cf3085a-51c4-42a7-a788-8e9dd9dbe4c7-kube-api-access-g7r9d\") pod \"rabbitmq-cluster-operator-manager-668c99d594-5lxsh\" (UID: \"2cf3085a-51c4-42a7-a788-8e9dd9dbe4c7\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-5lxsh" Jan 27 08:07:53 crc kubenswrapper[4787]: E0127 08:07:53.352855 4787 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Jan 27 08:07:53 crc kubenswrapper[4787]: E0127 08:07:53.352963 4787 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/116b0532-a8d5-47ec-8e12-e7ef482094d6-cert podName:116b0532-a8d5-47ec-8e12-e7ef482094d6 nodeName:}" failed. No retries permitted until 2026-01-27 08:07:54.352938514 +0000 UTC m=+980.005293996 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/116b0532-a8d5-47ec-8e12-e7ef482094d6-cert") pod "openstack-baremetal-operator-controller-manager-6b68b8b854cf9xq" (UID: "116b0532-a8d5-47ec-8e12-e7ef482094d6") : secret "openstack-baremetal-operator-webhook-server-cert" not found Jan 27 08:07:53 crc kubenswrapper[4787]: I0127 08:07:53.357392 4787 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/swift-operator-controller-manager-547cbdb99f-pxbjw" Jan 27 08:07:53 crc kubenswrapper[4787]: I0127 08:07:53.384342 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vgs22\" (UniqueName: \"kubernetes.io/projected/292c617a-b64f-4b53-b05e-360769308e43-kube-api-access-vgs22\") pod \"watcher-operator-controller-manager-75db85654f-8cmb9\" (UID: \"292c617a-b64f-4b53-b05e-360769308e43\") " pod="openstack-operators/watcher-operator-controller-manager-75db85654f-8cmb9" Jan 27 08:07:53 crc kubenswrapper[4787]: I0127 08:07:53.435514 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/cinder-operator-controller-manager-655bf9cfbb-6tnpg"] Jan 27 08:07:53 crc kubenswrapper[4787]: I0127 08:07:53.458780 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rvwxw\" (UniqueName: \"kubernetes.io/projected/7a0ecc5c-eebe-457b-9a72-804226878c7f-kube-api-access-rvwxw\") pod \"openstack-operator-controller-manager-c595dbb59-xc2xn\" (UID: \"7a0ecc5c-eebe-457b-9a72-804226878c7f\") " pod="openstack-operators/openstack-operator-controller-manager-c595dbb59-xc2xn" Jan 27 08:07:53 crc kubenswrapper[4787]: I0127 08:07:53.458935 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/7a0ecc5c-eebe-457b-9a72-804226878c7f-webhook-certs\") pod \"openstack-operator-controller-manager-c595dbb59-xc2xn\" (UID: \"7a0ecc5c-eebe-457b-9a72-804226878c7f\") " pod="openstack-operators/openstack-operator-controller-manager-c595dbb59-xc2xn" Jan 27 08:07:53 crc kubenswrapper[4787]: I0127 08:07:53.459002 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g7r9d\" (UniqueName: \"kubernetes.io/projected/2cf3085a-51c4-42a7-a788-8e9dd9dbe4c7-kube-api-access-g7r9d\") pod \"rabbitmq-cluster-operator-manager-668c99d594-5lxsh\" (UID: \"2cf3085a-51c4-42a7-a788-8e9dd9dbe4c7\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-5lxsh" Jan 27 08:07:53 crc kubenswrapper[4787]: I0127 08:07:53.459102 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/7a0ecc5c-eebe-457b-9a72-804226878c7f-metrics-certs\") pod \"openstack-operator-controller-manager-c595dbb59-xc2xn\" (UID: \"7a0ecc5c-eebe-457b-9a72-804226878c7f\") " pod="openstack-operators/openstack-operator-controller-manager-c595dbb59-xc2xn" Jan 27 08:07:53 crc kubenswrapper[4787]: E0127 08:07:53.459394 4787 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Jan 27 08:07:53 crc kubenswrapper[4787]: E0127 08:07:53.459652 4787 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/7a0ecc5c-eebe-457b-9a72-804226878c7f-metrics-certs podName:7a0ecc5c-eebe-457b-9a72-804226878c7f nodeName:}" failed. No retries permitted until 2026-01-27 08:07:53.959629529 +0000 UTC m=+979.611985021 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/7a0ecc5c-eebe-457b-9a72-804226878c7f-metrics-certs") pod "openstack-operator-controller-manager-c595dbb59-xc2xn" (UID: "7a0ecc5c-eebe-457b-9a72-804226878c7f") : secret "metrics-server-cert" not found Jan 27 08:07:53 crc kubenswrapper[4787]: E0127 08:07:53.461096 4787 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Jan 27 08:07:53 crc kubenswrapper[4787]: E0127 08:07:53.461187 4787 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/7a0ecc5c-eebe-457b-9a72-804226878c7f-webhook-certs podName:7a0ecc5c-eebe-457b-9a72-804226878c7f nodeName:}" failed. No retries permitted until 2026-01-27 08:07:53.961164252 +0000 UTC m=+979.613519744 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/7a0ecc5c-eebe-457b-9a72-804226878c7f-webhook-certs") pod "openstack-operator-controller-manager-c595dbb59-xc2xn" (UID: "7a0ecc5c-eebe-457b-9a72-804226878c7f") : secret "webhook-server-cert" not found Jan 27 08:07:53 crc kubenswrapper[4787]: I0127 08:07:53.462838 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/test-operator-controller-manager-69797bbcbd-dnxpj" Jan 27 08:07:53 crc kubenswrapper[4787]: I0127 08:07:53.477697 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/barbican-operator-controller-manager-65ff799cfd-d5r5z"] Jan 27 08:07:53 crc kubenswrapper[4787]: I0127 08:07:53.497529 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/watcher-operator-controller-manager-75db85654f-8cmb9" Jan 27 08:07:53 crc kubenswrapper[4787]: I0127 08:07:53.498465 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rvwxw\" (UniqueName: \"kubernetes.io/projected/7a0ecc5c-eebe-457b-9a72-804226878c7f-kube-api-access-rvwxw\") pod \"openstack-operator-controller-manager-c595dbb59-xc2xn\" (UID: \"7a0ecc5c-eebe-457b-9a72-804226878c7f\") " pod="openstack-operators/openstack-operator-controller-manager-c595dbb59-xc2xn" Jan 27 08:07:53 crc kubenswrapper[4787]: I0127 08:07:53.500145 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g7r9d\" (UniqueName: \"kubernetes.io/projected/2cf3085a-51c4-42a7-a788-8e9dd9dbe4c7-kube-api-access-g7r9d\") pod \"rabbitmq-cluster-operator-manager-668c99d594-5lxsh\" (UID: \"2cf3085a-51c4-42a7-a788-8e9dd9dbe4c7\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-5lxsh" Jan 27 08:07:53 crc kubenswrapper[4787]: I0127 08:07:53.569101 4787 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-5lxsh" Jan 27 08:07:53 crc kubenswrapper[4787]: I0127 08:07:53.607713 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ironic-operator-controller-manager-768b776ffb-j7klk"] Jan 27 08:07:53 crc kubenswrapper[4787]: W0127 08:07:53.669068 4787 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podff52e5a2_2aab_451a_8869_72c1a506940a.slice/crio-032da21e32681cb8b3f71da507d5b318acc4a91170de4f0fb91682730607f472 WatchSource:0}: Error finding container 032da21e32681cb8b3f71da507d5b318acc4a91170de4f0fb91682730607f472: Status 404 returned error can't find the container with id 032da21e32681cb8b3f71da507d5b318acc4a91170de4f0fb91682730607f472 Jan 27 08:07:53 crc kubenswrapper[4787]: I0127 08:07:53.974722 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/7a0ecc5c-eebe-457b-9a72-804226878c7f-webhook-certs\") pod \"openstack-operator-controller-manager-c595dbb59-xc2xn\" (UID: \"7a0ecc5c-eebe-457b-9a72-804226878c7f\") " pod="openstack-operators/openstack-operator-controller-manager-c595dbb59-xc2xn" Jan 27 08:07:53 crc kubenswrapper[4787]: I0127 08:07:53.974999 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/7a0ecc5c-eebe-457b-9a72-804226878c7f-metrics-certs\") pod \"openstack-operator-controller-manager-c595dbb59-xc2xn\" (UID: \"7a0ecc5c-eebe-457b-9a72-804226878c7f\") " pod="openstack-operators/openstack-operator-controller-manager-c595dbb59-xc2xn" Jan 27 08:07:53 crc kubenswrapper[4787]: E0127 08:07:53.975143 4787 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Jan 27 08:07:53 crc kubenswrapper[4787]: E0127 08:07:53.975280 4787 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/7a0ecc5c-eebe-457b-9a72-804226878c7f-webhook-certs podName:7a0ecc5c-eebe-457b-9a72-804226878c7f nodeName:}" failed. No retries permitted until 2026-01-27 08:07:54.975247238 +0000 UTC m=+980.627602890 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/7a0ecc5c-eebe-457b-9a72-804226878c7f-webhook-certs") pod "openstack-operator-controller-manager-c595dbb59-xc2xn" (UID: "7a0ecc5c-eebe-457b-9a72-804226878c7f") : secret "webhook-server-cert" not found Jan 27 08:07:53 crc kubenswrapper[4787]: E0127 08:07:53.975281 4787 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Jan 27 08:07:53 crc kubenswrapper[4787]: I0127 08:07:53.975328 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/designate-operator-controller-manager-77554cdc5c-627g6"] Jan 27 08:07:53 crc kubenswrapper[4787]: E0127 08:07:53.975357 4787 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/7a0ecc5c-eebe-457b-9a72-804226878c7f-metrics-certs podName:7a0ecc5c-eebe-457b-9a72-804226878c7f nodeName:}" failed. No retries permitted until 2026-01-27 08:07:54.975339581 +0000 UTC m=+980.627695073 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/7a0ecc5c-eebe-457b-9a72-804226878c7f-metrics-certs") pod "openstack-operator-controller-manager-c595dbb59-xc2xn" (UID: "7a0ecc5c-eebe-457b-9a72-804226878c7f") : secret "metrics-server-cert" not found Jan 27 08:07:53 crc kubenswrapper[4787]: W0127 08:07:53.979571 4787 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poddd8dbc24_8fb8_4ae2_96c3_b7112c7d7c65.slice/crio-eca53bd6055d939eecd9fca096d39ca6f8a7901664fa791bcc77bc01443ec10d WatchSource:0}: Error finding container eca53bd6055d939eecd9fca096d39ca6f8a7901664fa791bcc77bc01443ec10d: Status 404 returned error can't find the container with id eca53bd6055d939eecd9fca096d39ca6f8a7901664fa791bcc77bc01443ec10d Jan 27 08:07:54 crc kubenswrapper[4787]: I0127 08:07:54.076766 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/c634491f-6069-472d-bff7-d8903d0afa1d-cert\") pod \"infra-operator-controller-manager-7d75bc88d5-drtqw\" (UID: \"c634491f-6069-472d-bff7-d8903d0afa1d\") " pod="openstack-operators/infra-operator-controller-manager-7d75bc88d5-drtqw" Jan 27 08:07:54 crc kubenswrapper[4787]: E0127 08:07:54.077438 4787 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Jan 27 08:07:54 crc kubenswrapper[4787]: E0127 08:07:54.077562 4787 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/c634491f-6069-472d-bff7-d8903d0afa1d-cert podName:c634491f-6069-472d-bff7-d8903d0afa1d nodeName:}" failed. No retries permitted until 2026-01-27 08:07:56.077522319 +0000 UTC m=+981.729877811 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/c634491f-6069-472d-bff7-d8903d0afa1d-cert") pod "infra-operator-controller-manager-7d75bc88d5-drtqw" (UID: "c634491f-6069-472d-bff7-d8903d0afa1d") : secret "infra-operator-webhook-server-cert" not found Jan 27 08:07:54 crc kubenswrapper[4787]: I0127 08:07:54.162679 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/nova-operator-controller-manager-57d6b69d8b-s8kvs"] Jan 27 08:07:54 crc kubenswrapper[4787]: I0127 08:07:54.188015 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/manila-operator-controller-manager-849fcfbb6b-tbr5m"] Jan 27 08:07:54 crc kubenswrapper[4787]: I0127 08:07:54.199330 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/glance-operator-controller-manager-67dd55ff59-kk6mj"] Jan 27 08:07:54 crc kubenswrapper[4787]: I0127 08:07:54.213574 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-6b9fb5fdcb-88gn4"] Jan 27 08:07:54 crc kubenswrapper[4787]: W0127 08:07:54.234310 4787 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode59e94f5_9033_488d_a186_476cb6cbb3f0.slice/crio-163193571d08059017947cd7cf8fd0d1cb32cef8703ab7dcaeb079aaf4905a0e WatchSource:0}: Error finding container 163193571d08059017947cd7cf8fd0d1cb32cef8703ab7dcaeb079aaf4905a0e: Status 404 returned error can't find the container with id 163193571d08059017947cd7cf8fd0d1cb32cef8703ab7dcaeb079aaf4905a0e Jan 27 08:07:54 crc kubenswrapper[4787]: I0127 08:07:54.383802 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/116b0532-a8d5-47ec-8e12-e7ef482094d6-cert\") pod \"openstack-baremetal-operator-controller-manager-6b68b8b854cf9xq\" (UID: \"116b0532-a8d5-47ec-8e12-e7ef482094d6\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-6b68b8b854cf9xq" Jan 27 08:07:54 crc kubenswrapper[4787]: E0127 08:07:54.384024 4787 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Jan 27 08:07:54 crc kubenswrapper[4787]: E0127 08:07:54.384145 4787 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/116b0532-a8d5-47ec-8e12-e7ef482094d6-cert podName:116b0532-a8d5-47ec-8e12-e7ef482094d6 nodeName:}" failed. No retries permitted until 2026-01-27 08:07:56.38410064 +0000 UTC m=+982.036456132 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/116b0532-a8d5-47ec-8e12-e7ef482094d6-cert") pod "openstack-baremetal-operator-controller-manager-6b68b8b854cf9xq" (UID: "116b0532-a8d5-47ec-8e12-e7ef482094d6") : secret "openstack-baremetal-operator-webhook-server-cert" not found Jan 27 08:07:54 crc kubenswrapper[4787]: I0127 08:07:54.405351 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-799bc87c89-cqpjk"] Jan 27 08:07:54 crc kubenswrapper[4787]: I0127 08:07:54.409722 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/heat-operator-controller-manager-575ffb885b-57zhq"] Jan 27 08:07:54 crc kubenswrapper[4787]: W0127 08:07:54.418271 4787 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf621b9f3_ead9_4fab_b33f_f9d6179e8f3f.slice/crio-330c6e5a690fb8746a29b9b9a618e088182e74f96ff95aba590f091110934895 WatchSource:0}: Error finding container 330c6e5a690fb8746a29b9b9a618e088182e74f96ff95aba590f091110934895: Status 404 returned error can't find the container with id 330c6e5a690fb8746a29b9b9a618e088182e74f96ff95aba590f091110934895 Jan 27 08:07:54 crc kubenswrapper[4787]: I0127 08:07:54.419204 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/placement-operator-controller-manager-79d5ccc684-qhnz4"] Jan 27 08:07:54 crc kubenswrapper[4787]: W0127 08:07:54.427062 4787 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda552e9e7_f9fa_4b71_9ec6_848a2230279f.slice/crio-1b5f31b17f22d68dcd85e84abeeb681ebb8be676669effbdd506fe2c2680cb3b WatchSource:0}: Error finding container 1b5f31b17f22d68dcd85e84abeeb681ebb8be676669effbdd506fe2c2680cb3b: Status 404 returned error can't find the container with id 1b5f31b17f22d68dcd85e84abeeb681ebb8be676669effbdd506fe2c2680cb3b Jan 27 08:07:54 crc kubenswrapper[4787]: I0127 08:07:54.427572 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/swift-operator-controller-manager-547cbdb99f-pxbjw"] Jan 27 08:07:54 crc kubenswrapper[4787]: I0127 08:07:54.440137 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-655bf9cfbb-6tnpg" event={"ID":"85bafad5-30f3-4931-bd2a-0d45e2b0f844","Type":"ContainerStarted","Data":"3f3872a2defe40e27c44947b8f9e9b0563804154fae31f371a3f18711ac435c9"} Jan 27 08:07:54 crc kubenswrapper[4787]: I0127 08:07:54.444626 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-77554cdc5c-627g6" event={"ID":"dd8dbc24-8fb8-4ae2-96c3-b7112c7d7c65","Type":"ContainerStarted","Data":"eca53bd6055d939eecd9fca096d39ca6f8a7901664fa791bcc77bc01443ec10d"} Jan 27 08:07:54 crc kubenswrapper[4787]: I0127 08:07:54.448758 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-768b776ffb-j7klk" event={"ID":"ff52e5a2-2aab-451a-8869-72c1a506940a","Type":"ContainerStarted","Data":"032da21e32681cb8b3f71da507d5b318acc4a91170de4f0fb91682730607f472"} Jan 27 08:07:54 crc kubenswrapper[4787]: I0127 08:07:54.454012 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-849fcfbb6b-tbr5m" 
event={"ID":"59efc0ff-5727-48f9-91b7-36533ba5f94a","Type":"ContainerStarted","Data":"bcf91c200a19a125bd96b260351e3ca624f79e52d3faf95dd4a2a742e6300fad"} Jan 27 08:07:54 crc kubenswrapper[4787]: I0127 08:07:54.457319 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-65ff799cfd-d5r5z" event={"ID":"4b5515cb-f539-4a9b-8c46-36c5c62c5c93","Type":"ContainerStarted","Data":"b9f2fd7edf94d8532f5bae448ea78f10923a0258cb6f52813337ff00de222430"} Jan 27 08:07:54 crc kubenswrapper[4787]: I0127 08:07:54.458835 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-67dd55ff59-kk6mj" event={"ID":"be4be3e5-1589-4e84-9d35-f104bc3d5ad4","Type":"ContainerStarted","Data":"9c7808b0a996e5490e2f610d6bbcb3a413add6538ba4fbbaf60340489e0bae8d"} Jan 27 08:07:54 crc kubenswrapper[4787]: I0127 08:07:54.461564 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-55f684fd56-2k8pd" event={"ID":"32019c4d-61e3-4c27-86d4-3a79bb40ce70","Type":"ContainerStarted","Data":"2d31230a0a208ed4a18bd1d643f883a599c2806dcbaf94afd08235b18e6894c4"} Jan 27 08:07:54 crc kubenswrapper[4787]: I0127 08:07:54.463277 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-57d6b69d8b-s8kvs" event={"ID":"3ad17a28-2cb1-4455-8d38-58919a287ae6","Type":"ContainerStarted","Data":"2b4b23f00d60ad642771edd29b4d11331ba1889dbee6abb9a2f10d40a9233501"} Jan 27 08:07:54 crc kubenswrapper[4787]: I0127 08:07:54.464411 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-6b9fb5fdcb-88gn4" event={"ID":"e59e94f5-9033-488d-a186-476cb6cbb3f0","Type":"ContainerStarted","Data":"163193571d08059017947cd7cf8fd0d1cb32cef8703ab7dcaeb079aaf4905a0e"} Jan 27 08:07:54 crc kubenswrapper[4787]: I0127 08:07:54.596816 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/neutron-operator-controller-manager-7ffd8d76d4-vk5vt"] Jan 27 08:07:54 crc kubenswrapper[4787]: W0127 08:07:54.626128 4787 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podbe114c8e_3aa0_41b4_9954_d07c800d3cfc.slice/crio-83607832ac2a45723a9201cf6c2ad1cfc4d676eff390ca75c8de7ad98f54f282 WatchSource:0}: Error finding container 83607832ac2a45723a9201cf6c2ad1cfc4d676eff390ca75c8de7ad98f54f282: Status 404 returned error can't find the container with id 83607832ac2a45723a9201cf6c2ad1cfc4d676eff390ca75c8de7ad98f54f282 Jan 27 08:07:54 crc kubenswrapper[4787]: I0127 08:07:54.658213 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/test-operator-controller-manager-69797bbcbd-dnxpj"] Jan 27 08:07:54 crc kubenswrapper[4787]: I0127 08:07:54.681434 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ovn-operator-controller-manager-6f75f45d54-l9mc7"] Jan 27 08:07:54 crc kubenswrapper[4787]: E0127 08:07:54.691057 4787 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/lmiccini/octavia-operator@sha256:bb8d23f38682e4b987b621a3116500a76d0dc380a1bfb9ea77f18dfacdee4f49,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 
--metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-fbvs8,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod octavia-operator-controller-manager-7875d7675-x4nz5_openstack-operators(601fe40b-5553-4225-b5bd-214428d6fa68): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Jan 27 08:07:54 crc kubenswrapper[4787]: E0127 08:07:54.692202 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/octavia-operator-controller-manager-7875d7675-x4nz5" podUID="601fe40b-5553-4225-b5bd-214428d6fa68" Jan 27 08:07:54 crc kubenswrapper[4787]: I0127 08:07:54.705940 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/octavia-operator-controller-manager-7875d7675-x4nz5"] Jan 27 08:07:54 crc kubenswrapper[4787]: W0127 08:07:54.709232 4787 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod292c617a_b64f_4b53_b05e_360769308e43.slice/crio-fa0cc93f7c0c80a2c1c3d97c946f9567e38bf5acb54637c053b4ca79e9966972 WatchSource:0}: Error finding container fa0cc93f7c0c80a2c1c3d97c946f9567e38bf5acb54637c053b4ca79e9966972: Status 404 returned error can't find the container with id fa0cc93f7c0c80a2c1c3d97c946f9567e38bf5acb54637c053b4ca79e9966972 Jan 27 08:07:54 crc kubenswrapper[4787]: E0127 08:07:54.723119 4787 kuberuntime_manager.go:1274] "Unhandled Error" err="container 
&Container{Name:manager,Image:quay.io/lmiccini/watcher-operator@sha256:01f06b67539933628ebeb3c8fe813467eb6b478ba9fd4c6ff9892b8306e04f7a,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-vgs22,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod watcher-operator-controller-manager-75db85654f-8cmb9_openstack-operators(292c617a-b64f-4b53-b05e-360769308e43): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Jan 27 08:07:54 crc kubenswrapper[4787]: E0127 08:07:54.733208 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/watcher-operator-controller-manager-75db85654f-8cmb9" podUID="292c617a-b64f-4b53-b05e-360769308e43" Jan 27 08:07:54 crc kubenswrapper[4787]: I0127 08:07:54.737457 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-5lxsh"] Jan 27 08:07:54 crc kubenswrapper[4787]: W0127 08:07:54.743423 4787 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7cae616e_6aa8_405f_b10a_2a5346fae5b4.slice/crio-107eab0acfdcd94b74305a50569753b82cfa639bb204a4f0f8ade19977c7f5c1 WatchSource:0}: Error finding container 107eab0acfdcd94b74305a50569753b82cfa639bb204a4f0f8ade19977c7f5c1: Status 404 returned error can't find the container with id 107eab0acfdcd94b74305a50569753b82cfa639bb204a4f0f8ade19977c7f5c1 Jan 27 08:07:54 crc kubenswrapper[4787]: I0127 08:07:54.748127 4787 
kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/horizon-operator-controller-manager-77d5c5b54f-vkmnm"] Jan 27 08:07:54 crc kubenswrapper[4787]: I0127 08:07:54.755464 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/watcher-operator-controller-manager-75db85654f-8cmb9"] Jan 27 08:07:54 crc kubenswrapper[4787]: E0127 08:07:54.761819 4787 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/horizon-operator@sha256:3311e627bcb860d9443592a2c67078417318c9eb77d8ef4d07f9aa7027d46822,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-jb46d,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod horizon-operator-controller-manager-77d5c5b54f-vkmnm_openstack-operators(a21cc0b9-75d2-4ddb-925e-23eeaac2dd35): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Jan 27 08:07:54 crc kubenswrapper[4787]: E0127 08:07:54.763919 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/horizon-operator-controller-manager-77d5c5b54f-vkmnm" podUID="a21cc0b9-75d2-4ddb-925e-23eeaac2dd35" Jan 27 08:07:54 crc kubenswrapper[4787]: E0127 08:07:54.768282 4787 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/ovn-operator@sha256:fa46fc14710961e6b4a76a3522dca3aa3cfa71436c7cf7ade533d3712822f327,Command:[/manager],Args:[--leader-elect 
--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-njgtm,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod ovn-operator-controller-manager-6f75f45d54-l9mc7_openstack-operators(7cae616e-6aa8-405f-b10a-2a5346fae5b4): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Jan 27 08:07:54 crc kubenswrapper[4787]: E0127 08:07:54.769821 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/ovn-operator-controller-manager-6f75f45d54-l9mc7" podUID="7cae616e-6aa8-405f-b10a-2a5346fae5b4" Jan 27 08:07:55 crc kubenswrapper[4787]: I0127 08:07:55.005443 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/7a0ecc5c-eebe-457b-9a72-804226878c7f-webhook-certs\") pod \"openstack-operator-controller-manager-c595dbb59-xc2xn\" (UID: \"7a0ecc5c-eebe-457b-9a72-804226878c7f\") " pod="openstack-operators/openstack-operator-controller-manager-c595dbb59-xc2xn" Jan 27 08:07:55 crc kubenswrapper[4787]: I0127 08:07:55.005535 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/7a0ecc5c-eebe-457b-9a72-804226878c7f-metrics-certs\") pod \"openstack-operator-controller-manager-c595dbb59-xc2xn\" (UID: \"7a0ecc5c-eebe-457b-9a72-804226878c7f\") " pod="openstack-operators/openstack-operator-controller-manager-c595dbb59-xc2xn" Jan 27 08:07:55 crc kubenswrapper[4787]: E0127 08:07:55.005643 4787 secret.go:188] Couldn't get secret 
openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Jan 27 08:07:55 crc kubenswrapper[4787]: E0127 08:07:55.005749 4787 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/7a0ecc5c-eebe-457b-9a72-804226878c7f-webhook-certs podName:7a0ecc5c-eebe-457b-9a72-804226878c7f nodeName:}" failed. No retries permitted until 2026-01-27 08:07:57.00572117 +0000 UTC m=+982.658076662 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/7a0ecc5c-eebe-457b-9a72-804226878c7f-webhook-certs") pod "openstack-operator-controller-manager-c595dbb59-xc2xn" (UID: "7a0ecc5c-eebe-457b-9a72-804226878c7f") : secret "webhook-server-cert" not found Jan 27 08:07:55 crc kubenswrapper[4787]: E0127 08:07:55.005943 4787 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Jan 27 08:07:55 crc kubenswrapper[4787]: E0127 08:07:55.006090 4787 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/7a0ecc5c-eebe-457b-9a72-804226878c7f-metrics-certs podName:7a0ecc5c-eebe-457b-9a72-804226878c7f nodeName:}" failed. No retries permitted until 2026-01-27 08:07:57.006021528 +0000 UTC m=+982.658377140 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/7a0ecc5c-eebe-457b-9a72-804226878c7f-metrics-certs") pod "openstack-operator-controller-manager-c595dbb59-xc2xn" (UID: "7a0ecc5c-eebe-457b-9a72-804226878c7f") : secret "metrics-server-cert" not found Jan 27 08:07:55 crc kubenswrapper[4787]: I0127 08:07:55.503415 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-77d5c5b54f-vkmnm" event={"ID":"a21cc0b9-75d2-4ddb-925e-23eeaac2dd35","Type":"ContainerStarted","Data":"7ef3864535971cf1d47a5004d29759b51abf27aa0dfe40f03e886d65c1b944fa"} Jan 27 08:07:55 crc kubenswrapper[4787]: E0127 08:07:55.513532 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/horizon-operator@sha256:3311e627bcb860d9443592a2c67078417318c9eb77d8ef4d07f9aa7027d46822\\\"\"" pod="openstack-operators/horizon-operator-controller-manager-77d5c5b54f-vkmnm" podUID="a21cc0b9-75d2-4ddb-925e-23eeaac2dd35" Jan 27 08:07:55 crc kubenswrapper[4787]: I0127 08:07:55.524198 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-5lxsh" event={"ID":"2cf3085a-51c4-42a7-a788-8e9dd9dbe4c7","Type":"ContainerStarted","Data":"e1834a0dac7deedc263c0873a2ac7b531f871472a7d2c296f0207869bbc0e839"} Jan 27 08:07:55 crc kubenswrapper[4787]: I0127 08:07:55.549371 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-79d5ccc684-qhnz4" event={"ID":"a552e9e7-f9fa-4b71-9ec6-848a2230279f","Type":"ContainerStarted","Data":"1b5f31b17f22d68dcd85e84abeeb681ebb8be676669effbdd506fe2c2680cb3b"} Jan 27 08:07:55 crc kubenswrapper[4787]: I0127 08:07:55.552212 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-6f75f45d54-l9mc7" event={"ID":"7cae616e-6aa8-405f-b10a-2a5346fae5b4","Type":"ContainerStarted","Data":"107eab0acfdcd94b74305a50569753b82cfa639bb204a4f0f8ade19977c7f5c1"} Jan 27 08:07:55 crc kubenswrapper[4787]: I0127 08:07:55.554756 4787 kubelet.go:2453] 
"SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-547cbdb99f-pxbjw" event={"ID":"89d7a7e7-443a-486f-b8b7-a024082b9fba","Type":"ContainerStarted","Data":"f51b1dc9927bde3d2ebc96fcdf0f6ba6cf63fed25568162cd40133d5187d30a5"} Jan 27 08:07:55 crc kubenswrapper[4787]: E0127 08:07:55.555003 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/ovn-operator@sha256:fa46fc14710961e6b4a76a3522dca3aa3cfa71436c7cf7ade533d3712822f327\\\"\"" pod="openstack-operators/ovn-operator-controller-manager-6f75f45d54-l9mc7" podUID="7cae616e-6aa8-405f-b10a-2a5346fae5b4" Jan 27 08:07:55 crc kubenswrapper[4787]: I0127 08:07:55.564023 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-7875d7675-x4nz5" event={"ID":"601fe40b-5553-4225-b5bd-214428d6fa68","Type":"ContainerStarted","Data":"03a831befd65af301aee0eb9feea4090c097cb6b811d96d224e221128e7e239c"} Jan 27 08:07:55 crc kubenswrapper[4787]: E0127 08:07:55.571341 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/lmiccini/octavia-operator@sha256:bb8d23f38682e4b987b621a3116500a76d0dc380a1bfb9ea77f18dfacdee4f49\\\"\"" pod="openstack-operators/octavia-operator-controller-manager-7875d7675-x4nz5" podUID="601fe40b-5553-4225-b5bd-214428d6fa68" Jan 27 08:07:55 crc kubenswrapper[4787]: I0127 08:07:55.615749 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-69797bbcbd-dnxpj" event={"ID":"7ec92388-f799-43cc-9235-6d4b717bc98e","Type":"ContainerStarted","Data":"ec38725f2bbdc76cb40f38f81bd044c96a957dfb80218ce8b829a6d2af1d778b"} Jan 27 08:07:55 crc kubenswrapper[4787]: I0127 08:07:55.619569 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-575ffb885b-57zhq" event={"ID":"f621b9f3-ead9-4fab-b33f-f9d6179e8f3f","Type":"ContainerStarted","Data":"330c6e5a690fb8746a29b9b9a618e088182e74f96ff95aba590f091110934895"} Jan 27 08:07:55 crc kubenswrapper[4787]: I0127 08:07:55.621114 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-7ffd8d76d4-vk5vt" event={"ID":"be114c8e-3aa0-41b4-9954-d07c800d3cfc","Type":"ContainerStarted","Data":"83607832ac2a45723a9201cf6c2ad1cfc4d676eff390ca75c8de7ad98f54f282"} Jan 27 08:07:55 crc kubenswrapper[4787]: I0127 08:07:55.624500 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-799bc87c89-cqpjk" event={"ID":"7986c3dd-6d06-4892-9fed-1b396669685b","Type":"ContainerStarted","Data":"f92bac400cb19bab8b6063da65c14f8a6168df5b411b283aa8b61db6ca94bcfe"} Jan 27 08:07:55 crc kubenswrapper[4787]: I0127 08:07:55.628104 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-75db85654f-8cmb9" event={"ID":"292c617a-b64f-4b53-b05e-360769308e43","Type":"ContainerStarted","Data":"fa0cc93f7c0c80a2c1c3d97c946f9567e38bf5acb54637c053b4ca79e9966972"} Jan 27 08:07:55 crc kubenswrapper[4787]: E0127 08:07:55.630218 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image 
\\\"quay.io/lmiccini/watcher-operator@sha256:01f06b67539933628ebeb3c8fe813467eb6b478ba9fd4c6ff9892b8306e04f7a\\\"\"" pod="openstack-operators/watcher-operator-controller-manager-75db85654f-8cmb9" podUID="292c617a-b64f-4b53-b05e-360769308e43" Jan 27 08:07:56 crc kubenswrapper[4787]: I0127 08:07:56.139053 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/c634491f-6069-472d-bff7-d8903d0afa1d-cert\") pod \"infra-operator-controller-manager-7d75bc88d5-drtqw\" (UID: \"c634491f-6069-472d-bff7-d8903d0afa1d\") " pod="openstack-operators/infra-operator-controller-manager-7d75bc88d5-drtqw" Jan 27 08:07:56 crc kubenswrapper[4787]: E0127 08:07:56.139301 4787 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Jan 27 08:07:56 crc kubenswrapper[4787]: E0127 08:07:56.139417 4787 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/c634491f-6069-472d-bff7-d8903d0afa1d-cert podName:c634491f-6069-472d-bff7-d8903d0afa1d nodeName:}" failed. No retries permitted until 2026-01-27 08:08:00.139386682 +0000 UTC m=+985.791742364 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/c634491f-6069-472d-bff7-d8903d0afa1d-cert") pod "infra-operator-controller-manager-7d75bc88d5-drtqw" (UID: "c634491f-6069-472d-bff7-d8903d0afa1d") : secret "infra-operator-webhook-server-cert" not found Jan 27 08:07:56 crc kubenswrapper[4787]: I0127 08:07:56.444314 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/116b0532-a8d5-47ec-8e12-e7ef482094d6-cert\") pod \"openstack-baremetal-operator-controller-manager-6b68b8b854cf9xq\" (UID: \"116b0532-a8d5-47ec-8e12-e7ef482094d6\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-6b68b8b854cf9xq" Jan 27 08:07:56 crc kubenswrapper[4787]: E0127 08:07:56.444601 4787 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Jan 27 08:07:56 crc kubenswrapper[4787]: E0127 08:07:56.446481 4787 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/116b0532-a8d5-47ec-8e12-e7ef482094d6-cert podName:116b0532-a8d5-47ec-8e12-e7ef482094d6 nodeName:}" failed. No retries permitted until 2026-01-27 08:08:00.445858413 +0000 UTC m=+986.098213905 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/116b0532-a8d5-47ec-8e12-e7ef482094d6-cert") pod "openstack-baremetal-operator-controller-manager-6b68b8b854cf9xq" (UID: "116b0532-a8d5-47ec-8e12-e7ef482094d6") : secret "openstack-baremetal-operator-webhook-server-cert" not found Jan 27 08:07:56 crc kubenswrapper[4787]: E0127 08:07:56.647230 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/ovn-operator@sha256:fa46fc14710961e6b4a76a3522dca3aa3cfa71436c7cf7ade533d3712822f327\\\"\"" pod="openstack-operators/ovn-operator-controller-manager-6f75f45d54-l9mc7" podUID="7cae616e-6aa8-405f-b10a-2a5346fae5b4" Jan 27 08:07:56 crc kubenswrapper[4787]: E0127 08:07:56.647661 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/lmiccini/octavia-operator@sha256:bb8d23f38682e4b987b621a3116500a76d0dc380a1bfb9ea77f18dfacdee4f49\\\"\"" pod="openstack-operators/octavia-operator-controller-manager-7875d7675-x4nz5" podUID="601fe40b-5553-4225-b5bd-214428d6fa68" Jan 27 08:07:56 crc kubenswrapper[4787]: E0127 08:07:56.647717 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/horizon-operator@sha256:3311e627bcb860d9443592a2c67078417318c9eb77d8ef4d07f9aa7027d46822\\\"\"" pod="openstack-operators/horizon-operator-controller-manager-77d5c5b54f-vkmnm" podUID="a21cc0b9-75d2-4ddb-925e-23eeaac2dd35" Jan 27 08:07:56 crc kubenswrapper[4787]: E0127 08:07:56.647983 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/lmiccini/watcher-operator@sha256:01f06b67539933628ebeb3c8fe813467eb6b478ba9fd4c6ff9892b8306e04f7a\\\"\"" pod="openstack-operators/watcher-operator-controller-manager-75db85654f-8cmb9" podUID="292c617a-b64f-4b53-b05e-360769308e43" Jan 27 08:07:57 crc kubenswrapper[4787]: I0127 08:07:57.054874 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/7a0ecc5c-eebe-457b-9a72-804226878c7f-webhook-certs\") pod \"openstack-operator-controller-manager-c595dbb59-xc2xn\" (UID: \"7a0ecc5c-eebe-457b-9a72-804226878c7f\") " pod="openstack-operators/openstack-operator-controller-manager-c595dbb59-xc2xn" Jan 27 08:07:57 crc kubenswrapper[4787]: I0127 08:07:57.054946 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/7a0ecc5c-eebe-457b-9a72-804226878c7f-metrics-certs\") pod \"openstack-operator-controller-manager-c595dbb59-xc2xn\" (UID: \"7a0ecc5c-eebe-457b-9a72-804226878c7f\") " pod="openstack-operators/openstack-operator-controller-manager-c595dbb59-xc2xn" Jan 27 08:07:57 crc kubenswrapper[4787]: E0127 08:07:57.055115 4787 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Jan 27 08:07:57 crc kubenswrapper[4787]: E0127 08:07:57.055174 4787 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/7a0ecc5c-eebe-457b-9a72-804226878c7f-metrics-certs podName:7a0ecc5c-eebe-457b-9a72-804226878c7f nodeName:}" failed. 
No retries permitted until 2026-01-27 08:08:01.055155914 +0000 UTC m=+986.707511406 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/7a0ecc5c-eebe-457b-9a72-804226878c7f-metrics-certs") pod "openstack-operator-controller-manager-c595dbb59-xc2xn" (UID: "7a0ecc5c-eebe-457b-9a72-804226878c7f") : secret "metrics-server-cert" not found Jan 27 08:07:57 crc kubenswrapper[4787]: E0127 08:07:57.055220 4787 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Jan 27 08:07:57 crc kubenswrapper[4787]: E0127 08:07:57.055241 4787 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/7a0ecc5c-eebe-457b-9a72-804226878c7f-webhook-certs podName:7a0ecc5c-eebe-457b-9a72-804226878c7f nodeName:}" failed. No retries permitted until 2026-01-27 08:08:01.055235846 +0000 UTC m=+986.707591338 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/7a0ecc5c-eebe-457b-9a72-804226878c7f-webhook-certs") pod "openstack-operator-controller-manager-c595dbb59-xc2xn" (UID: "7a0ecc5c-eebe-457b-9a72-804226878c7f") : secret "webhook-server-cert" not found Jan 27 08:08:00 crc kubenswrapper[4787]: I0127 08:08:00.224567 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/c634491f-6069-472d-bff7-d8903d0afa1d-cert\") pod \"infra-operator-controller-manager-7d75bc88d5-drtqw\" (UID: \"c634491f-6069-472d-bff7-d8903d0afa1d\") " pod="openstack-operators/infra-operator-controller-manager-7d75bc88d5-drtqw" Jan 27 08:08:00 crc kubenswrapper[4787]: E0127 08:08:00.224908 4787 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Jan 27 08:08:00 crc kubenswrapper[4787]: E0127 08:08:00.225034 4787 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/c634491f-6069-472d-bff7-d8903d0afa1d-cert podName:c634491f-6069-472d-bff7-d8903d0afa1d nodeName:}" failed. No retries permitted until 2026-01-27 08:08:08.225003821 +0000 UTC m=+993.877359483 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/c634491f-6069-472d-bff7-d8903d0afa1d-cert") pod "infra-operator-controller-manager-7d75bc88d5-drtqw" (UID: "c634491f-6069-472d-bff7-d8903d0afa1d") : secret "infra-operator-webhook-server-cert" not found Jan 27 08:08:00 crc kubenswrapper[4787]: I0127 08:08:00.530159 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/116b0532-a8d5-47ec-8e12-e7ef482094d6-cert\") pod \"openstack-baremetal-operator-controller-manager-6b68b8b854cf9xq\" (UID: \"116b0532-a8d5-47ec-8e12-e7ef482094d6\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-6b68b8b854cf9xq" Jan 27 08:08:00 crc kubenswrapper[4787]: E0127 08:08:00.530497 4787 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Jan 27 08:08:00 crc kubenswrapper[4787]: E0127 08:08:00.530656 4787 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/116b0532-a8d5-47ec-8e12-e7ef482094d6-cert podName:116b0532-a8d5-47ec-8e12-e7ef482094d6 nodeName:}" failed. 
No retries permitted until 2026-01-27 08:08:08.530620883 +0000 UTC m=+994.182976405 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/116b0532-a8d5-47ec-8e12-e7ef482094d6-cert") pod "openstack-baremetal-operator-controller-manager-6b68b8b854cf9xq" (UID: "116b0532-a8d5-47ec-8e12-e7ef482094d6") : secret "openstack-baremetal-operator-webhook-server-cert" not found Jan 27 08:08:01 crc kubenswrapper[4787]: I0127 08:08:01.141132 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/7a0ecc5c-eebe-457b-9a72-804226878c7f-metrics-certs\") pod \"openstack-operator-controller-manager-c595dbb59-xc2xn\" (UID: \"7a0ecc5c-eebe-457b-9a72-804226878c7f\") " pod="openstack-operators/openstack-operator-controller-manager-c595dbb59-xc2xn" Jan 27 08:08:01 crc kubenswrapper[4787]: I0127 08:08:01.141340 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/7a0ecc5c-eebe-457b-9a72-804226878c7f-webhook-certs\") pod \"openstack-operator-controller-manager-c595dbb59-xc2xn\" (UID: \"7a0ecc5c-eebe-457b-9a72-804226878c7f\") " pod="openstack-operators/openstack-operator-controller-manager-c595dbb59-xc2xn" Jan 27 08:08:01 crc kubenswrapper[4787]: E0127 08:08:01.141433 4787 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Jan 27 08:08:01 crc kubenswrapper[4787]: E0127 08:08:01.141522 4787 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Jan 27 08:08:01 crc kubenswrapper[4787]: E0127 08:08:01.141618 4787 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/7a0ecc5c-eebe-457b-9a72-804226878c7f-webhook-certs podName:7a0ecc5c-eebe-457b-9a72-804226878c7f nodeName:}" failed. No retries permitted until 2026-01-27 08:08:09.141595491 +0000 UTC m=+994.793950983 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/7a0ecc5c-eebe-457b-9a72-804226878c7f-webhook-certs") pod "openstack-operator-controller-manager-c595dbb59-xc2xn" (UID: "7a0ecc5c-eebe-457b-9a72-804226878c7f") : secret "webhook-server-cert" not found Jan 27 08:08:01 crc kubenswrapper[4787]: E0127 08:08:01.141643 4787 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/7a0ecc5c-eebe-457b-9a72-804226878c7f-metrics-certs podName:7a0ecc5c-eebe-457b-9a72-804226878c7f nodeName:}" failed. No retries permitted until 2026-01-27 08:08:09.141632962 +0000 UTC m=+994.793988454 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/7a0ecc5c-eebe-457b-9a72-804226878c7f-metrics-certs") pod "openstack-operator-controller-manager-c595dbb59-xc2xn" (UID: "7a0ecc5c-eebe-457b-9a72-804226878c7f") : secret "metrics-server-cert" not found Jan 27 08:08:07 crc kubenswrapper[4787]: E0127 08:08:07.134403 4787 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/lmiccini/heat-operator@sha256:027f3118543388d561b452a9777783b1f866ffaf59d9a1b16a225b1c5636111f" Jan 27 08:08:07 crc kubenswrapper[4787]: E0127 08:08:07.135148 4787 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/lmiccini/heat-operator@sha256:027f3118543388d561b452a9777783b1f866ffaf59d9a1b16a225b1c5636111f,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-qv4mh,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod heat-operator-controller-manager-575ffb885b-57zhq_openstack-operators(f621b9f3-ead9-4fab-b33f-f9d6179e8f3f): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Jan 27 08:08:07 crc kubenswrapper[4787]: E0127 08:08:07.136351 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/heat-operator-controller-manager-575ffb885b-57zhq" podUID="f621b9f3-ead9-4fab-b33f-f9d6179e8f3f" 
Jan 27 08:08:07 crc kubenswrapper[4787]: E0127 08:08:07.613905 4787 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/lmiccini/neutron-operator@sha256:14786c3a66c41213a03d6375c03209f22d439dd6e752317ddcbe21dda66bb569" Jan 27 08:08:07 crc kubenswrapper[4787]: E0127 08:08:07.614134 4787 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/lmiccini/neutron-operator@sha256:14786c3a66c41213a03d6375c03209f22d439dd6e752317ddcbe21dda66bb569,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-z8r9f,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod neutron-operator-controller-manager-7ffd8d76d4-vk5vt_openstack-operators(be114c8e-3aa0-41b4-9954-d07c800d3cfc): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Jan 27 08:08:07 crc kubenswrapper[4787]: E0127 08:08:07.615758 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/neutron-operator-controller-manager-7ffd8d76d4-vk5vt" podUID="be114c8e-3aa0-41b4-9954-d07c800d3cfc" Jan 27 08:08:07 crc kubenswrapper[4787]: E0127 08:08:07.740590 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image 
\\\"quay.io/lmiccini/neutron-operator@sha256:14786c3a66c41213a03d6375c03209f22d439dd6e752317ddcbe21dda66bb569\\\"\"" pod="openstack-operators/neutron-operator-controller-manager-7ffd8d76d4-vk5vt" podUID="be114c8e-3aa0-41b4-9954-d07c800d3cfc" Jan 27 08:08:07 crc kubenswrapper[4787]: E0127 08:08:07.741200 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/lmiccini/heat-operator@sha256:027f3118543388d561b452a9777783b1f866ffaf59d9a1b16a225b1c5636111f\\\"\"" pod="openstack-operators/heat-operator-controller-manager-575ffb885b-57zhq" podUID="f621b9f3-ead9-4fab-b33f-f9d6179e8f3f" Jan 27 08:08:08 crc kubenswrapper[4787]: I0127 08:08:08.290863 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/c634491f-6069-472d-bff7-d8903d0afa1d-cert\") pod \"infra-operator-controller-manager-7d75bc88d5-drtqw\" (UID: \"c634491f-6069-472d-bff7-d8903d0afa1d\") " pod="openstack-operators/infra-operator-controller-manager-7d75bc88d5-drtqw" Jan 27 08:08:08 crc kubenswrapper[4787]: I0127 08:08:08.299209 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/c634491f-6069-472d-bff7-d8903d0afa1d-cert\") pod \"infra-operator-controller-manager-7d75bc88d5-drtqw\" (UID: \"c634491f-6069-472d-bff7-d8903d0afa1d\") " pod="openstack-operators/infra-operator-controller-manager-7d75bc88d5-drtqw" Jan 27 08:08:08 crc kubenswrapper[4787]: I0127 08:08:08.509653 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-7d75bc88d5-drtqw" Jan 27 08:08:08 crc kubenswrapper[4787]: I0127 08:08:08.597155 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/116b0532-a8d5-47ec-8e12-e7ef482094d6-cert\") pod \"openstack-baremetal-operator-controller-manager-6b68b8b854cf9xq\" (UID: \"116b0532-a8d5-47ec-8e12-e7ef482094d6\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-6b68b8b854cf9xq" Jan 27 08:08:08 crc kubenswrapper[4787]: I0127 08:08:08.602847 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/116b0532-a8d5-47ec-8e12-e7ef482094d6-cert\") pod \"openstack-baremetal-operator-controller-manager-6b68b8b854cf9xq\" (UID: \"116b0532-a8d5-47ec-8e12-e7ef482094d6\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-6b68b8b854cf9xq" Jan 27 08:08:08 crc kubenswrapper[4787]: I0127 08:08:08.782458 4787 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-baremetal-operator-controller-manager-6b68b8b854cf9xq" Jan 27 08:08:09 crc kubenswrapper[4787]: I0127 08:08:09.207804 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/7a0ecc5c-eebe-457b-9a72-804226878c7f-metrics-certs\") pod \"openstack-operator-controller-manager-c595dbb59-xc2xn\" (UID: \"7a0ecc5c-eebe-457b-9a72-804226878c7f\") " pod="openstack-operators/openstack-operator-controller-manager-c595dbb59-xc2xn" Jan 27 08:08:09 crc kubenswrapper[4787]: I0127 08:08:09.208020 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/7a0ecc5c-eebe-457b-9a72-804226878c7f-webhook-certs\") pod \"openstack-operator-controller-manager-c595dbb59-xc2xn\" (UID: \"7a0ecc5c-eebe-457b-9a72-804226878c7f\") " pod="openstack-operators/openstack-operator-controller-manager-c595dbb59-xc2xn" Jan 27 08:08:09 crc kubenswrapper[4787]: I0127 08:08:09.212465 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/7a0ecc5c-eebe-457b-9a72-804226878c7f-webhook-certs\") pod \"openstack-operator-controller-manager-c595dbb59-xc2xn\" (UID: \"7a0ecc5c-eebe-457b-9a72-804226878c7f\") " pod="openstack-operators/openstack-operator-controller-manager-c595dbb59-xc2xn" Jan 27 08:08:09 crc kubenswrapper[4787]: I0127 08:08:09.213307 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/7a0ecc5c-eebe-457b-9a72-804226878c7f-metrics-certs\") pod \"openstack-operator-controller-manager-c595dbb59-xc2xn\" (UID: \"7a0ecc5c-eebe-457b-9a72-804226878c7f\") " pod="openstack-operators/openstack-operator-controller-manager-c595dbb59-xc2xn" Jan 27 08:08:09 crc kubenswrapper[4787]: I0127 08:08:09.483450 4787 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-controller-manager-c595dbb59-xc2xn" Jan 27 08:08:14 crc kubenswrapper[4787]: E0127 08:08:14.785927 4787 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/lmiccini/keystone-operator@sha256:008a2e338430e7dd513f81f66320cc5c1332c332a3191b537d75786489d7f487" Jan 27 08:08:14 crc kubenswrapper[4787]: E0127 08:08:14.786645 4787 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/lmiccini/keystone-operator@sha256:008a2e338430e7dd513f81f66320cc5c1332c332a3191b537d75786489d7f487,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-jzws7,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod keystone-operator-controller-manager-55f684fd56-2k8pd_openstack-operators(32019c4d-61e3-4c27-86d4-3a79bb40ce70): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Jan 27 08:08:14 crc kubenswrapper[4787]: E0127 08:08:14.787943 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/keystone-operator-controller-manager-55f684fd56-2k8pd" podUID="32019c4d-61e3-4c27-86d4-3a79bb40ce70" Jan 27 08:08:14 crc kubenswrapper[4787]: E0127 08:08:14.800276 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with 
ImagePullBackOff: \"Back-off pulling image \\\"quay.io/lmiccini/keystone-operator@sha256:008a2e338430e7dd513f81f66320cc5c1332c332a3191b537d75786489d7f487\\\"\"" pod="openstack-operators/keystone-operator-controller-manager-55f684fd56-2k8pd" podUID="32019c4d-61e3-4c27-86d4-3a79bb40ce70" Jan 27 08:08:15 crc kubenswrapper[4787]: E0127 08:08:15.266420 4787 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2" Jan 27 08:08:15 crc kubenswrapper[4787]: E0127 08:08:15.266987 4787 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:operator,Image:quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2,Command:[/manager],Args:[],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:metrics,HostPort:0,ContainerPort:9782,Protocol:TCP,HostIP:,},},Env:[]EnvVar{EnvVar{Name:OPERATOR_NAMESPACE,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:metadata.namespace,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{200 -3} {} 200m DecimalSI},memory: {{524288000 0} {} 500Mi BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-g7r9d,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000660000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod rabbitmq-cluster-operator-manager-668c99d594-5lxsh_openstack-operators(2cf3085a-51c4-42a7-a788-8e9dd9dbe4c7): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Jan 27 08:08:15 crc kubenswrapper[4787]: E0127 08:08:15.268241 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-5lxsh" podUID="2cf3085a-51c4-42a7-a788-8e9dd9dbe4c7" Jan 27 08:08:15 crc kubenswrapper[4787]: E0127 08:08:15.806526 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ImagePullBackOff: \"Back-off pulling image 
\\\"quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2\\\"\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-5lxsh" podUID="2cf3085a-51c4-42a7-a788-8e9dd9dbe4c7" Jan 27 08:08:16 crc kubenswrapper[4787]: E0127 08:08:16.701322 4787 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="38.102.83.47:5001/openstack-k8s-operators/nova-operator:1804867abea6e84ca4655efc2800c4a3a97f8b26" Jan 27 08:08:16 crc kubenswrapper[4787]: E0127 08:08:16.701449 4787 kuberuntime_image.go:55] "Failed to pull image" err="rpc error: code = Canceled desc = copying config: context canceled" image="38.102.83.47:5001/openstack-k8s-operators/nova-operator:1804867abea6e84ca4655efc2800c4a3a97f8b26" Jan 27 08:08:16 crc kubenswrapper[4787]: E0127 08:08:16.701690 4787 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:38.102.83.47:5001/openstack-k8s-operators/nova-operator:1804867abea6e84ca4655efc2800c4a3a97f8b26,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-2vdjb,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod nova-operator-controller-manager-57d6b69d8b-s8kvs_openstack-operators(3ad17a28-2cb1-4455-8d38-58919a287ae6): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Jan 27 08:08:16 crc kubenswrapper[4787]: E0127 08:08:16.702814 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="failed 
to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/nova-operator-controller-manager-57d6b69d8b-s8kvs" podUID="3ad17a28-2cb1-4455-8d38-58919a287ae6" Jan 27 08:08:16 crc kubenswrapper[4787]: E0127 08:08:16.819277 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"38.102.83.47:5001/openstack-k8s-operators/nova-operator:1804867abea6e84ca4655efc2800c4a3a97f8b26\\\"\"" pod="openstack-operators/nova-operator-controller-manager-57d6b69d8b-s8kvs" podUID="3ad17a28-2cb1-4455-8d38-58919a287ae6" Jan 27 08:08:18 crc kubenswrapper[4787]: I0127 08:08:18.833832 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-67dd55ff59-kk6mj" event={"ID":"be4be3e5-1589-4e84-9d35-f104bc3d5ad4","Type":"ContainerStarted","Data":"0f31d458943d75c1d98606b52d1813ebba2a16540349e53bf19b6976e43ffa35"} Jan 27 08:08:18 crc kubenswrapper[4787]: I0127 08:08:18.835479 4787 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/glance-operator-controller-manager-67dd55ff59-kk6mj" Jan 27 08:08:18 crc kubenswrapper[4787]: I0127 08:08:18.880026 4787 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/glance-operator-controller-manager-67dd55ff59-kk6mj" podStartSLOduration=5.005196086 podStartE2EDuration="26.87999664s" podCreationTimestamp="2026-01-27 08:07:52 +0000 UTC" firstStartedPulling="2026-01-27 08:07:54.202606085 +0000 UTC m=+979.854961577" lastFinishedPulling="2026-01-27 08:08:16.077406639 +0000 UTC m=+1001.729762131" observedRunningTime="2026-01-27 08:08:18.879160132 +0000 UTC m=+1004.531515624" watchObservedRunningTime="2026-01-27 08:08:18.87999664 +0000 UTC m=+1004.532352152" Jan 27 08:08:18 crc kubenswrapper[4787]: I0127 08:08:18.931418 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-manager-c595dbb59-xc2xn"] Jan 27 08:08:18 crc kubenswrapper[4787]: I0127 08:08:18.945130 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-controller-manager-7d75bc88d5-drtqw"] Jan 27 08:08:18 crc kubenswrapper[4787]: W0127 08:08:18.955325 4787 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7a0ecc5c_eebe_457b_9a72_804226878c7f.slice/crio-ddb2d1a4499089659acfaa98698f1932dea208cdf10dd56354008bbc81026c46 WatchSource:0}: Error finding container ddb2d1a4499089659acfaa98698f1932dea208cdf10dd56354008bbc81026c46: Status 404 returned error can't find the container with id ddb2d1a4499089659acfaa98698f1932dea208cdf10dd56354008bbc81026c46 Jan 27 08:08:18 crc kubenswrapper[4787]: W0127 08:08:18.979215 4787 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc634491f_6069_472d_bff7_d8903d0afa1d.slice/crio-de9ec2eae17d265ff4e0b874fbf2c37a473b01453b98faa77c82ada974c18bc4 WatchSource:0}: Error finding container de9ec2eae17d265ff4e0b874fbf2c37a473b01453b98faa77c82ada974c18bc4: Status 404 returned error can't find the container with id de9ec2eae17d265ff4e0b874fbf2c37a473b01453b98faa77c82ada974c18bc4 Jan 27 08:08:19 crc kubenswrapper[4787]: I0127 08:08:19.031385 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" 
pods=["openstack-operators/openstack-baremetal-operator-controller-manager-6b68b8b854cf9xq"] Jan 27 08:08:19 crc kubenswrapper[4787]: W0127 08:08:19.057608 4787 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod116b0532_a8d5_47ec_8e12_e7ef482094d6.slice/crio-973e696b1f986786fc50d2c8193625a258b0f496dc0d40214f3f6c7debf4fd6e WatchSource:0}: Error finding container 973e696b1f986786fc50d2c8193625a258b0f496dc0d40214f3f6c7debf4fd6e: Status 404 returned error can't find the container with id 973e696b1f986786fc50d2c8193625a258b0f496dc0d40214f3f6c7debf4fd6e Jan 27 08:08:19 crc kubenswrapper[4787]: I0127 08:08:19.849414 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-7d75bc88d5-drtqw" event={"ID":"c634491f-6069-472d-bff7-d8903d0afa1d","Type":"ContainerStarted","Data":"de9ec2eae17d265ff4e0b874fbf2c37a473b01453b98faa77c82ada974c18bc4"} Jan 27 08:08:19 crc kubenswrapper[4787]: I0127 08:08:19.856150 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-77554cdc5c-627g6" event={"ID":"dd8dbc24-8fb8-4ae2-96c3-b7112c7d7c65","Type":"ContainerStarted","Data":"b5bd5e6d329752bfebf8b5ba174c28170d8e3445e248f0671a1f6513805d45e3"} Jan 27 08:08:19 crc kubenswrapper[4787]: I0127 08:08:19.856345 4787 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/designate-operator-controller-manager-77554cdc5c-627g6" Jan 27 08:08:19 crc kubenswrapper[4787]: I0127 08:08:19.866302 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-c595dbb59-xc2xn" event={"ID":"7a0ecc5c-eebe-457b-9a72-804226878c7f","Type":"ContainerStarted","Data":"2de6f1e5f20624495fa30047f790219dd72f0825754dadf23e6dcd3f0ddbe5ad"} Jan 27 08:08:19 crc kubenswrapper[4787]: I0127 08:08:19.866358 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-c595dbb59-xc2xn" event={"ID":"7a0ecc5c-eebe-457b-9a72-804226878c7f","Type":"ContainerStarted","Data":"ddb2d1a4499089659acfaa98698f1932dea208cdf10dd56354008bbc81026c46"} Jan 27 08:08:19 crc kubenswrapper[4787]: I0127 08:08:19.866515 4787 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-controller-manager-c595dbb59-xc2xn" Jan 27 08:08:19 crc kubenswrapper[4787]: I0127 08:08:19.872299 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-799bc87c89-cqpjk" event={"ID":"7986c3dd-6d06-4892-9fed-1b396669685b","Type":"ContainerStarted","Data":"1be013423657fa04611ffefe50af5dcd6810d660d35d2528fd2f945d7dcba752"} Jan 27 08:08:19 crc kubenswrapper[4787]: I0127 08:08:19.872431 4787 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/telemetry-operator-controller-manager-799bc87c89-cqpjk" Jan 27 08:08:19 crc kubenswrapper[4787]: I0127 08:08:19.884823 4787 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/designate-operator-controller-manager-77554cdc5c-627g6" podStartSLOduration=5.287959079 podStartE2EDuration="27.884775852s" podCreationTimestamp="2026-01-27 08:07:52 +0000 UTC" firstStartedPulling="2026-01-27 08:07:53.9831216 +0000 UTC m=+979.635477092" lastFinishedPulling="2026-01-27 08:08:16.579938373 +0000 UTC m=+1002.232293865" observedRunningTime="2026-01-27 
08:08:19.880857797 +0000 UTC m=+1005.533213289" watchObservedRunningTime="2026-01-27 08:08:19.884775852 +0000 UTC m=+1005.537131364" Jan 27 08:08:19 crc kubenswrapper[4787]: I0127 08:08:19.887236 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-575ffb885b-57zhq" event={"ID":"f621b9f3-ead9-4fab-b33f-f9d6179e8f3f","Type":"ContainerStarted","Data":"a9287b80626111fc77fa40695afbcb8ab4d027be7418ae412ab1891f08b7b524"} Jan 27 08:08:19 crc kubenswrapper[4787]: I0127 08:08:19.887513 4787 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/heat-operator-controller-manager-575ffb885b-57zhq" Jan 27 08:08:19 crc kubenswrapper[4787]: I0127 08:08:19.901826 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-6b9fb5fdcb-88gn4" event={"ID":"e59e94f5-9033-488d-a186-476cb6cbb3f0","Type":"ContainerStarted","Data":"8ff0a77e5cc8c91e8c6f022dbfd2c9c2acc980b0caa5690479cfe7c634b5f1c0"} Jan 27 08:08:19 crc kubenswrapper[4787]: I0127 08:08:19.902179 4787 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/mariadb-operator-controller-manager-6b9fb5fdcb-88gn4" Jan 27 08:08:19 crc kubenswrapper[4787]: I0127 08:08:19.907792 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-768b776ffb-j7klk" event={"ID":"ff52e5a2-2aab-451a-8869-72c1a506940a","Type":"ContainerStarted","Data":"38a2e0de94e8f3188ee7fde0ada4d229450b767f6b947b8aa35559222b7d155d"} Jan 27 08:08:19 crc kubenswrapper[4787]: I0127 08:08:19.907971 4787 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/ironic-operator-controller-manager-768b776ffb-j7klk" Jan 27 08:08:19 crc kubenswrapper[4787]: I0127 08:08:19.914696 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-6b68b8b854cf9xq" event={"ID":"116b0532-a8d5-47ec-8e12-e7ef482094d6","Type":"ContainerStarted","Data":"973e696b1f986786fc50d2c8193625a258b0f496dc0d40214f3f6c7debf4fd6e"} Jan 27 08:08:19 crc kubenswrapper[4787]: I0127 08:08:19.929332 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-75db85654f-8cmb9" event={"ID":"292c617a-b64f-4b53-b05e-360769308e43","Type":"ContainerStarted","Data":"fba5e0adbd3c18024e0639401644f737fc506527b0fc0c84900001b8fd783e12"} Jan 27 08:08:19 crc kubenswrapper[4787]: I0127 08:08:19.929994 4787 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/watcher-operator-controller-manager-75db85654f-8cmb9" Jan 27 08:08:19 crc kubenswrapper[4787]: I0127 08:08:19.936399 4787 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/telemetry-operator-controller-manager-799bc87c89-cqpjk" podStartSLOduration=6.296015523 podStartE2EDuration="27.936381387s" podCreationTimestamp="2026-01-27 08:07:52 +0000 UTC" firstStartedPulling="2026-01-27 08:07:54.437057375 +0000 UTC m=+980.089412867" lastFinishedPulling="2026-01-27 08:08:16.077423239 +0000 UTC m=+1001.729778731" observedRunningTime="2026-01-27 08:08:19.93422915 +0000 UTC m=+1005.586584642" watchObservedRunningTime="2026-01-27 08:08:19.936381387 +0000 UTC m=+1005.588736879" Jan 27 08:08:19 crc kubenswrapper[4787]: I0127 08:08:19.942464 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack-operators/ovn-operator-controller-manager-6f75f45d54-l9mc7" event={"ID":"7cae616e-6aa8-405f-b10a-2a5346fae5b4","Type":"ContainerStarted","Data":"3196d989fff66b6ff15b1e99011b6e0718e2a77094020f3c80a47155709a22be"} Jan 27 08:08:19 crc kubenswrapper[4787]: I0127 08:08:19.942852 4787 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/ovn-operator-controller-manager-6f75f45d54-l9mc7" Jan 27 08:08:19 crc kubenswrapper[4787]: I0127 08:08:19.954897 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-65ff799cfd-d5r5z" event={"ID":"4b5515cb-f539-4a9b-8c46-36c5c62c5c93","Type":"ContainerStarted","Data":"844228fe2286c5cdba986b68dd287d59189c52a7cab3383d1d52734682aab299"} Jan 27 08:08:19 crc kubenswrapper[4787]: I0127 08:08:19.955798 4787 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/barbican-operator-controller-manager-65ff799cfd-d5r5z" Jan 27 08:08:19 crc kubenswrapper[4787]: I0127 08:08:19.964055 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-655bf9cfbb-6tnpg" event={"ID":"85bafad5-30f3-4931-bd2a-0d45e2b0f844","Type":"ContainerStarted","Data":"76cbf57d6c49b05bdfd746f2e6cc6fafee24a6ad6f3c688eecc680826f7327e9"} Jan 27 08:08:19 crc kubenswrapper[4787]: I0127 08:08:19.964217 4787 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/cinder-operator-controller-manager-655bf9cfbb-6tnpg" Jan 27 08:08:19 crc kubenswrapper[4787]: I0127 08:08:19.969314 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-7875d7675-x4nz5" event={"ID":"601fe40b-5553-4225-b5bd-214428d6fa68","Type":"ContainerStarted","Data":"03aa5290b47cccc13e1f81c46272c88171072ef4530aaa1d0848a606eb021f35"} Jan 27 08:08:19 crc kubenswrapper[4787]: I0127 08:08:19.970185 4787 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/octavia-operator-controller-manager-7875d7675-x4nz5" Jan 27 08:08:20 crc kubenswrapper[4787]: I0127 08:08:20.007410 4787 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-controller-manager-c595dbb59-xc2xn" podStartSLOduration=28.007379524 podStartE2EDuration="28.007379524s" podCreationTimestamp="2026-01-27 08:07:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-27 08:08:19.971867991 +0000 UTC m=+1005.624223493" watchObservedRunningTime="2026-01-27 08:08:20.007379524 +0000 UTC m=+1005.659735026" Jan 27 08:08:20 crc kubenswrapper[4787]: I0127 08:08:20.018463 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-69797bbcbd-dnxpj" event={"ID":"7ec92388-f799-43cc-9235-6d4b717bc98e","Type":"ContainerStarted","Data":"ad7673d247dc1b77c422c17804d53946b34ef7be2378e4c17588feee5e3862b1"} Jan 27 08:08:20 crc kubenswrapper[4787]: I0127 08:08:20.019232 4787 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/test-operator-controller-manager-69797bbcbd-dnxpj" Jan 27 08:08:20 crc kubenswrapper[4787]: I0127 08:08:20.039388 4787 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/barbican-operator-controller-manager-65ff799cfd-d5r5z" podStartSLOduration=3.306643052 
podStartE2EDuration="28.039359202s" podCreationTimestamp="2026-01-27 08:07:52 +0000 UTC" firstStartedPulling="2026-01-27 08:07:53.645333788 +0000 UTC m=+979.297689280" lastFinishedPulling="2026-01-27 08:08:18.378049938 +0000 UTC m=+1004.030405430" observedRunningTime="2026-01-27 08:08:20.002934198 +0000 UTC m=+1005.655289690" watchObservedRunningTime="2026-01-27 08:08:20.039359202 +0000 UTC m=+1005.691714714" Jan 27 08:08:20 crc kubenswrapper[4787]: I0127 08:08:20.046264 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-849fcfbb6b-tbr5m" event={"ID":"59efc0ff-5727-48f9-91b7-36533ba5f94a","Type":"ContainerStarted","Data":"3ee22adc2b860fbef8f8b83dca320796dd722076ae8abdfae507dc0c1e75861e"} Jan 27 08:08:20 crc kubenswrapper[4787]: I0127 08:08:20.047032 4787 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/manila-operator-controller-manager-849fcfbb6b-tbr5m" Jan 27 08:08:20 crc kubenswrapper[4787]: I0127 08:08:20.061469 4787 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/ovn-operator-controller-manager-6f75f45d54-l9mc7" podStartSLOduration=4.371686856 podStartE2EDuration="28.061441733s" podCreationTimestamp="2026-01-27 08:07:52 +0000 UTC" firstStartedPulling="2026-01-27 08:07:54.76803674 +0000 UTC m=+980.420392232" lastFinishedPulling="2026-01-27 08:08:18.457791607 +0000 UTC m=+1004.110147109" observedRunningTime="2026-01-27 08:08:20.035448276 +0000 UTC m=+1005.687803768" watchObservedRunningTime="2026-01-27 08:08:20.061441733 +0000 UTC m=+1005.713797235" Jan 27 08:08:20 crc kubenswrapper[4787]: I0127 08:08:20.085499 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-547cbdb99f-pxbjw" event={"ID":"89d7a7e7-443a-486f-b8b7-a024082b9fba","Type":"ContainerStarted","Data":"15782e1eb7eb94093c1ae1c96c50e2d3bcdddb22c7c9008b13406a1ba759f072"} Jan 27 08:08:20 crc kubenswrapper[4787]: I0127 08:08:20.086039 4787 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/swift-operator-controller-manager-547cbdb99f-pxbjw" Jan 27 08:08:20 crc kubenswrapper[4787]: I0127 08:08:20.092270 4787 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/octavia-operator-controller-manager-7875d7675-x4nz5" podStartSLOduration=4.3332949 podStartE2EDuration="28.092251415s" podCreationTimestamp="2026-01-27 08:07:52 +0000 UTC" firstStartedPulling="2026-01-27 08:07:54.690766236 +0000 UTC m=+980.343121728" lastFinishedPulling="2026-01-27 08:08:18.449722751 +0000 UTC m=+1004.102078243" observedRunningTime="2026-01-27 08:08:20.073509566 +0000 UTC m=+1005.725865058" watchObservedRunningTime="2026-01-27 08:08:20.092251415 +0000 UTC m=+1005.744606907" Jan 27 08:08:20 crc kubenswrapper[4787]: I0127 08:08:20.105276 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-79d5ccc684-qhnz4" event={"ID":"a552e9e7-f9fa-4b71-9ec6-848a2230279f","Type":"ContainerStarted","Data":"3a0b5d26ab0e99e59e1f05644c2f72033c6151ac3781abe42dfb805a24b13f9b"} Jan 27 08:08:20 crc kubenswrapper[4787]: I0127 08:08:20.106450 4787 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/placement-operator-controller-manager-79d5ccc684-qhnz4" Jan 27 08:08:20 crc kubenswrapper[4787]: I0127 08:08:20.119658 4787 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openstack-operators/mariadb-operator-controller-manager-6b9fb5fdcb-88gn4" podStartSLOduration=6.280199037 podStartE2EDuration="28.119635051s" podCreationTimestamp="2026-01-27 08:07:52 +0000 UTC" firstStartedPulling="2026-01-27 08:07:54.237992136 +0000 UTC m=+979.890347628" lastFinishedPulling="2026-01-27 08:08:16.07742815 +0000 UTC m=+1001.729783642" observedRunningTime="2026-01-27 08:08:20.106949645 +0000 UTC m=+1005.759305157" watchObservedRunningTime="2026-01-27 08:08:20.119635051 +0000 UTC m=+1005.771990543" Jan 27 08:08:20 crc kubenswrapper[4787]: I0127 08:08:20.133662 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-77d5c5b54f-vkmnm" event={"ID":"a21cc0b9-75d2-4ddb-925e-23eeaac2dd35","Type":"ContainerStarted","Data":"a37adf6b59116969238245b575db674fd06aeada5b8d8f23014c3dc389766980"} Jan 27 08:08:20 crc kubenswrapper[4787]: I0127 08:08:20.134203 4787 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/horizon-operator-controller-manager-77d5c5b54f-vkmnm" Jan 27 08:08:20 crc kubenswrapper[4787]: I0127 08:08:20.143009 4787 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/cinder-operator-controller-manager-655bf9cfbb-6tnpg" podStartSLOduration=5.656924892 podStartE2EDuration="28.142988731s" podCreationTimestamp="2026-01-27 08:07:52 +0000 UTC" firstStartedPulling="2026-01-27 08:07:53.593377325 +0000 UTC m=+979.245732817" lastFinishedPulling="2026-01-27 08:08:16.079441144 +0000 UTC m=+1001.731796656" observedRunningTime="2026-01-27 08:08:20.142198873 +0000 UTC m=+1005.794554375" watchObservedRunningTime="2026-01-27 08:08:20.142988731 +0000 UTC m=+1005.795344233" Jan 27 08:08:20 crc kubenswrapper[4787]: I0127 08:08:20.183829 4787 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/heat-operator-controller-manager-575ffb885b-57zhq" podStartSLOduration=3.060496034 podStartE2EDuration="28.18379951s" podCreationTimestamp="2026-01-27 08:07:52 +0000 UTC" firstStartedPulling="2026-01-27 08:07:54.426065775 +0000 UTC m=+980.078421267" lastFinishedPulling="2026-01-27 08:08:19.549369251 +0000 UTC m=+1005.201724743" observedRunningTime="2026-01-27 08:08:20.176670335 +0000 UTC m=+1005.829025837" watchObservedRunningTime="2026-01-27 08:08:20.18379951 +0000 UTC m=+1005.836155002" Jan 27 08:08:20 crc kubenswrapper[4787]: I0127 08:08:20.244320 4787 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/watcher-operator-controller-manager-75db85654f-8cmb9" podStartSLOduration=4.483042234 podStartE2EDuration="28.244293549s" podCreationTimestamp="2026-01-27 08:07:52 +0000 UTC" firstStartedPulling="2026-01-27 08:07:54.722713412 +0000 UTC m=+980.375068904" lastFinishedPulling="2026-01-27 08:08:18.483964737 +0000 UTC m=+1004.136320219" observedRunningTime="2026-01-27 08:08:20.243981482 +0000 UTC m=+1005.896336974" watchObservedRunningTime="2026-01-27 08:08:20.244293549 +0000 UTC m=+1005.896649041" Jan 27 08:08:20 crc kubenswrapper[4787]: I0127 08:08:20.279846 4787 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/ironic-operator-controller-manager-768b776ffb-j7klk" podStartSLOduration=5.380021247 podStartE2EDuration="28.279823474s" podCreationTimestamp="2026-01-27 08:07:52 +0000 UTC" firstStartedPulling="2026-01-27 08:07:53.677857106 +0000 UTC m=+979.330212598" lastFinishedPulling="2026-01-27 08:08:16.577659333 +0000 UTC m=+1002.230014825" 
observedRunningTime="2026-01-27 08:08:20.278503805 +0000 UTC m=+1005.930859297" watchObservedRunningTime="2026-01-27 08:08:20.279823474 +0000 UTC m=+1005.932178966" Jan 27 08:08:20 crc kubenswrapper[4787]: I0127 08:08:20.332342 4787 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/test-operator-controller-manager-69797bbcbd-dnxpj" podStartSLOduration=6.4063432670000005 podStartE2EDuration="28.332314707s" podCreationTimestamp="2026-01-27 08:07:52 +0000 UTC" firstStartedPulling="2026-01-27 08:07:54.655004216 +0000 UTC m=+980.307359708" lastFinishedPulling="2026-01-27 08:08:16.580975656 +0000 UTC m=+1002.233331148" observedRunningTime="2026-01-27 08:08:20.316007821 +0000 UTC m=+1005.968363313" watchObservedRunningTime="2026-01-27 08:08:20.332314707 +0000 UTC m=+1005.984670199" Jan 27 08:08:20 crc kubenswrapper[4787]: I0127 08:08:20.359922 4787 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/placement-operator-controller-manager-79d5ccc684-qhnz4" podStartSLOduration=6.117288037 podStartE2EDuration="28.359902229s" podCreationTimestamp="2026-01-27 08:07:52 +0000 UTC" firstStartedPulling="2026-01-27 08:07:54.437418544 +0000 UTC m=+980.089774036" lastFinishedPulling="2026-01-27 08:08:16.680032746 +0000 UTC m=+1002.332388228" observedRunningTime="2026-01-27 08:08:20.35630919 +0000 UTC m=+1006.008664682" watchObservedRunningTime="2026-01-27 08:08:20.359902229 +0000 UTC m=+1006.012257721" Jan 27 08:08:20 crc kubenswrapper[4787]: I0127 08:08:20.406150 4787 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/manila-operator-controller-manager-849fcfbb6b-tbr5m" podStartSLOduration=6.521752923 podStartE2EDuration="28.406119036s" podCreationTimestamp="2026-01-27 08:07:52 +0000 UTC" firstStartedPulling="2026-01-27 08:07:54.193022676 +0000 UTC m=+979.845378168" lastFinishedPulling="2026-01-27 08:08:16.077388789 +0000 UTC m=+1001.729744281" observedRunningTime="2026-01-27 08:08:20.397181691 +0000 UTC m=+1006.049537183" watchObservedRunningTime="2026-01-27 08:08:20.406119036 +0000 UTC m=+1006.058474528" Jan 27 08:08:20 crc kubenswrapper[4787]: I0127 08:08:20.423405 4787 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/horizon-operator-controller-manager-77d5c5b54f-vkmnm" podStartSLOduration=4.748493359 podStartE2EDuration="28.423388052s" podCreationTimestamp="2026-01-27 08:07:52 +0000 UTC" firstStartedPulling="2026-01-27 08:07:54.76161527 +0000 UTC m=+980.413970762" lastFinishedPulling="2026-01-27 08:08:18.436509963 +0000 UTC m=+1004.088865455" observedRunningTime="2026-01-27 08:08:20.422410211 +0000 UTC m=+1006.074765703" watchObservedRunningTime="2026-01-27 08:08:20.423388052 +0000 UTC m=+1006.075743544" Jan 27 08:08:20 crc kubenswrapper[4787]: I0127 08:08:20.455040 4787 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/swift-operator-controller-manager-547cbdb99f-pxbjw" podStartSLOduration=6.195941301 podStartE2EDuration="28.455008582s" podCreationTimestamp="2026-01-27 08:07:52 +0000 UTC" firstStartedPulling="2026-01-27 08:07:54.442257129 +0000 UTC m=+980.094612621" lastFinishedPulling="2026-01-27 08:08:16.7013244 +0000 UTC m=+1002.353679902" observedRunningTime="2026-01-27 08:08:20.443607934 +0000 UTC m=+1006.095963426" watchObservedRunningTime="2026-01-27 08:08:20.455008582 +0000 UTC m=+1006.107364064" Jan 27 08:08:22 crc kubenswrapper[4787]: I0127 08:08:22.823076 4787 patch_prober.go:28] 
interesting pod/machine-config-daemon-q4fh5 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 27 08:08:22 crc kubenswrapper[4787]: I0127 08:08:22.823527 4787 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-q4fh5" podUID="f051e184-acac-47cf-9e04-9df648288715" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 27 08:08:23 crc kubenswrapper[4787]: I0127 08:08:23.078679 4787 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Jan 27 08:08:23 crc kubenswrapper[4787]: I0127 08:08:23.158263 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-7d75bc88d5-drtqw" event={"ID":"c634491f-6069-472d-bff7-d8903d0afa1d","Type":"ContainerStarted","Data":"9a5f9af42577fe0535c92baf06b60bc3890e044dd01088d7c9e763662aae3dc0"} Jan 27 08:08:23 crc kubenswrapper[4787]: I0127 08:08:23.158387 4787 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/infra-operator-controller-manager-7d75bc88d5-drtqw" Jan 27 08:08:23 crc kubenswrapper[4787]: I0127 08:08:23.161625 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-6b68b8b854cf9xq" event={"ID":"116b0532-a8d5-47ec-8e12-e7ef482094d6","Type":"ContainerStarted","Data":"98f7a5376e8970cb01e8781bb3f4d9c7e3e9eed78127e6726f1a694774c92eda"} Jan 27 08:08:23 crc kubenswrapper[4787]: I0127 08:08:23.161925 4787 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-baremetal-operator-controller-manager-6b68b8b854cf9xq" Jan 27 08:08:23 crc kubenswrapper[4787]: I0127 08:08:23.191136 4787 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/infra-operator-controller-manager-7d75bc88d5-drtqw" podStartSLOduration=27.247732896 podStartE2EDuration="31.191107743s" podCreationTimestamp="2026-01-27 08:07:52 +0000 UTC" firstStartedPulling="2026-01-27 08:08:18.983579928 +0000 UTC m=+1004.635935420" lastFinishedPulling="2026-01-27 08:08:22.926954775 +0000 UTC m=+1008.579310267" observedRunningTime="2026-01-27 08:08:23.180140864 +0000 UTC m=+1008.832496346" watchObservedRunningTime="2026-01-27 08:08:23.191107743 +0000 UTC m=+1008.843463235" Jan 27 08:08:23 crc kubenswrapper[4787]: I0127 08:08:23.219230 4787 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-baremetal-operator-controller-manager-6b68b8b854cf9xq" podStartSLOduration=27.344525527 podStartE2EDuration="31.219199346s" podCreationTimestamp="2026-01-27 08:07:52 +0000 UTC" firstStartedPulling="2026-01-27 08:08:19.061924876 +0000 UTC m=+1004.714280368" lastFinishedPulling="2026-01-27 08:08:22.936598705 +0000 UTC m=+1008.588954187" observedRunningTime="2026-01-27 08:08:23.213270857 +0000 UTC m=+1008.865626339" watchObservedRunningTime="2026-01-27 08:08:23.219199346 +0000 UTC m=+1008.871554838" Jan 27 08:08:24 crc kubenswrapper[4787]: I0127 08:08:24.170600 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-7ffd8d76d4-vk5vt" 
event={"ID":"be114c8e-3aa0-41b4-9954-d07c800d3cfc","Type":"ContainerStarted","Data":"2257e99f9ac6108ca9b22a6fd685af6bf45dd232de7c606a67622748d69995ef"} Jan 27 08:08:24 crc kubenswrapper[4787]: I0127 08:08:24.171821 4787 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/neutron-operator-controller-manager-7ffd8d76d4-vk5vt" Jan 27 08:08:28 crc kubenswrapper[4787]: I0127 08:08:28.518017 4787 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/infra-operator-controller-manager-7d75bc88d5-drtqw" Jan 27 08:08:28 crc kubenswrapper[4787]: I0127 08:08:28.542722 4787 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/neutron-operator-controller-manager-7ffd8d76d4-vk5vt" podStartSLOduration=7.576213869 podStartE2EDuration="36.542698717s" podCreationTimestamp="2026-01-27 08:07:52 +0000 UTC" firstStartedPulling="2026-01-27 08:07:54.685315287 +0000 UTC m=+980.337670779" lastFinishedPulling="2026-01-27 08:08:23.651800135 +0000 UTC m=+1009.304155627" observedRunningTime="2026-01-27 08:08:24.212310033 +0000 UTC m=+1009.864665525" watchObservedRunningTime="2026-01-27 08:08:28.542698717 +0000 UTC m=+1014.195054209" Jan 27 08:08:28 crc kubenswrapper[4787]: I0127 08:08:28.793518 4787 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-baremetal-operator-controller-manager-6b68b8b854cf9xq" Jan 27 08:08:29 crc kubenswrapper[4787]: I0127 08:08:29.489993 4787 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-controller-manager-c595dbb59-xc2xn" Jan 27 08:08:30 crc kubenswrapper[4787]: I0127 08:08:30.228407 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-5lxsh" event={"ID":"2cf3085a-51c4-42a7-a788-8e9dd9dbe4c7","Type":"ContainerStarted","Data":"2e67358ef5452f8e65588979d6ba23bd18fabf7c82969f0f92f0255c75a39f77"} Jan 27 08:08:30 crc kubenswrapper[4787]: I0127 08:08:30.248764 4787 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-5lxsh" podStartSLOduration=2.447435819 podStartE2EDuration="37.248745205s" podCreationTimestamp="2026-01-27 08:07:53 +0000 UTC" firstStartedPulling="2026-01-27 08:07:54.684667132 +0000 UTC m=+980.337022624" lastFinishedPulling="2026-01-27 08:08:29.485976528 +0000 UTC m=+1015.138332010" observedRunningTime="2026-01-27 08:08:30.246572138 +0000 UTC m=+1015.898927620" watchObservedRunningTime="2026-01-27 08:08:30.248745205 +0000 UTC m=+1015.901100697" Jan 27 08:08:31 crc kubenswrapper[4787]: I0127 08:08:31.237771 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-57d6b69d8b-s8kvs" event={"ID":"3ad17a28-2cb1-4455-8d38-58919a287ae6","Type":"ContainerStarted","Data":"e67cc281b7eb394ed1cf80319838ad97dd8c88447cab1e5fbeb55f20bbbe9488"} Jan 27 08:08:31 crc kubenswrapper[4787]: I0127 08:08:31.238342 4787 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/nova-operator-controller-manager-57d6b69d8b-s8kvs" Jan 27 08:08:31 crc kubenswrapper[4787]: I0127 08:08:31.239735 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-55f684fd56-2k8pd" 
event={"ID":"32019c4d-61e3-4c27-86d4-3a79bb40ce70","Type":"ContainerStarted","Data":"ea44ab5dd60dbac9f99e762e21c703c8457dfab59c68b898d6406903dc281b4a"} Jan 27 08:08:31 crc kubenswrapper[4787]: I0127 08:08:31.239894 4787 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/keystone-operator-controller-manager-55f684fd56-2k8pd" Jan 27 08:08:31 crc kubenswrapper[4787]: I0127 08:08:31.258369 4787 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/nova-operator-controller-manager-57d6b69d8b-s8kvs" podStartSLOduration=3.2881490270000002 podStartE2EDuration="39.258353473s" podCreationTimestamp="2026-01-27 08:07:52 +0000 UTC" firstStartedPulling="2026-01-27 08:07:54.187459414 +0000 UTC m=+979.839814896" lastFinishedPulling="2026-01-27 08:08:30.15766385 +0000 UTC m=+1015.810019342" observedRunningTime="2026-01-27 08:08:31.25732867 +0000 UTC m=+1016.909684152" watchObservedRunningTime="2026-01-27 08:08:31.258353473 +0000 UTC m=+1016.910708965" Jan 27 08:08:31 crc kubenswrapper[4787]: I0127 08:08:31.277585 4787 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/keystone-operator-controller-manager-55f684fd56-2k8pd" podStartSLOduration=2.229673545 podStartE2EDuration="39.277550591s" podCreationTimestamp="2026-01-27 08:07:52 +0000 UTC" firstStartedPulling="2026-01-27 08:07:53.447004184 +0000 UTC m=+979.099359676" lastFinishedPulling="2026-01-27 08:08:30.49488123 +0000 UTC m=+1016.147236722" observedRunningTime="2026-01-27 08:08:31.274384482 +0000 UTC m=+1016.926739974" watchObservedRunningTime="2026-01-27 08:08:31.277550591 +0000 UTC m=+1016.929906083" Jan 27 08:08:32 crc kubenswrapper[4787]: I0127 08:08:32.452944 4787 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/cinder-operator-controller-manager-655bf9cfbb-6tnpg" Jan 27 08:08:32 crc kubenswrapper[4787]: I0127 08:08:32.684843 4787 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/ironic-operator-controller-manager-768b776ffb-j7klk" Jan 27 08:08:32 crc kubenswrapper[4787]: I0127 08:08:32.731256 4787 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/barbican-operator-controller-manager-65ff799cfd-d5r5z" Jan 27 08:08:32 crc kubenswrapper[4787]: I0127 08:08:32.807381 4787 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/designate-operator-controller-manager-77554cdc5c-627g6" Jan 27 08:08:32 crc kubenswrapper[4787]: I0127 08:08:32.846686 4787 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/glance-operator-controller-manager-67dd55ff59-kk6mj" Jan 27 08:08:32 crc kubenswrapper[4787]: I0127 08:08:32.864187 4787 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/heat-operator-controller-manager-575ffb885b-57zhq" Jan 27 08:08:32 crc kubenswrapper[4787]: I0127 08:08:32.891035 4787 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/horizon-operator-controller-manager-77d5c5b54f-vkmnm" Jan 27 08:08:32 crc kubenswrapper[4787]: I0127 08:08:32.891135 4787 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/manila-operator-controller-manager-849fcfbb6b-tbr5m" Jan 27 08:08:32 crc kubenswrapper[4787]: I0127 08:08:32.956591 4787 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" 
pod="openstack-operators/mariadb-operator-controller-manager-6b9fb5fdcb-88gn4" Jan 27 08:08:33 crc kubenswrapper[4787]: I0127 08:08:33.021419 4787 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/neutron-operator-controller-manager-7ffd8d76d4-vk5vt" Jan 27 08:08:33 crc kubenswrapper[4787]: I0127 08:08:33.025257 4787 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/placement-operator-controller-manager-79d5ccc684-qhnz4" Jan 27 08:08:33 crc kubenswrapper[4787]: I0127 08:08:33.113759 4787 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/octavia-operator-controller-manager-7875d7675-x4nz5" Jan 27 08:08:33 crc kubenswrapper[4787]: I0127 08:08:33.145521 4787 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/telemetry-operator-controller-manager-799bc87c89-cqpjk" Jan 27 08:08:33 crc kubenswrapper[4787]: I0127 08:08:33.245266 4787 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/ovn-operator-controller-manager-6f75f45d54-l9mc7" Jan 27 08:08:33 crc kubenswrapper[4787]: I0127 08:08:33.360837 4787 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/swift-operator-controller-manager-547cbdb99f-pxbjw" Jan 27 08:08:33 crc kubenswrapper[4787]: I0127 08:08:33.466513 4787 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/test-operator-controller-manager-69797bbcbd-dnxpj" Jan 27 08:08:33 crc kubenswrapper[4787]: I0127 08:08:33.500714 4787 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/watcher-operator-controller-manager-75db85654f-8cmb9" Jan 27 08:08:42 crc kubenswrapper[4787]: I0127 08:08:42.715595 4787 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/keystone-operator-controller-manager-55f684fd56-2k8pd" Jan 27 08:08:42 crc kubenswrapper[4787]: I0127 08:08:42.778184 4787 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/nova-operator-controller-manager-57d6b69d8b-s8kvs" Jan 27 08:08:52 crc kubenswrapper[4787]: I0127 08:08:52.767428 4787 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["nova-kuttl-default/rabbitmq-server-0"] Jan 27 08:08:52 crc kubenswrapper[4787]: I0127 08:08:52.769483 4787 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="nova-kuttl-default/rabbitmq-server-0" Jan 27 08:08:52 crc kubenswrapper[4787]: I0127 08:08:52.773541 4787 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"rabbitmq-erlang-cookie" Jan 27 08:08:52 crc kubenswrapper[4787]: I0127 08:08:52.774480 4787 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"rabbitmq-default-user" Jan 27 08:08:52 crc kubenswrapper[4787]: I0127 08:08:52.776340 4787 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"rabbitmq-server-dockercfg-f62tp" Jan 27 08:08:52 crc kubenswrapper[4787]: I0127 08:08:52.777448 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"nova-kuttl-default"/"kube-root-ca.crt" Jan 27 08:08:52 crc kubenswrapper[4787]: I0127 08:08:52.777678 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"nova-kuttl-default"/"rabbitmq-plugins-conf" Jan 27 08:08:52 crc kubenswrapper[4787]: I0127 08:08:52.777868 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"nova-kuttl-default"/"rabbitmq-server-conf" Jan 27 08:08:52 crc kubenswrapper[4787]: I0127 08:08:52.778013 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"nova-kuttl-default"/"openshift-service-ca.crt" Jan 27 08:08:52 crc kubenswrapper[4787]: I0127 08:08:52.790888 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/rabbitmq-server-0"] Jan 27 08:08:52 crc kubenswrapper[4787]: I0127 08:08:52.857465 4787 patch_prober.go:28] interesting pod/machine-config-daemon-q4fh5 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 27 08:08:52 crc kubenswrapper[4787]: I0127 08:08:52.857616 4787 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-q4fh5" podUID="f051e184-acac-47cf-9e04-9df648288715" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 27 08:08:52 crc kubenswrapper[4787]: I0127 08:08:52.878681 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/f84bfff4-b4f2-40d8-8b81-a9e5eb776442-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"f84bfff4-b4f2-40d8-8b81-a9e5eb776442\") " pod="nova-kuttl-default/rabbitmq-server-0" Jan 27 08:08:52 crc kubenswrapper[4787]: I0127 08:08:52.878765 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/f84bfff4-b4f2-40d8-8b81-a9e5eb776442-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"f84bfff4-b4f2-40d8-8b81-a9e5eb776442\") " pod="nova-kuttl-default/rabbitmq-server-0" Jan 27 08:08:52 crc kubenswrapper[4787]: I0127 08:08:52.878791 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/f84bfff4-b4f2-40d8-8b81-a9e5eb776442-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"f84bfff4-b4f2-40d8-8b81-a9e5eb776442\") " pod="nova-kuttl-default/rabbitmq-server-0" Jan 27 08:08:52 crc kubenswrapper[4787]: I0127 08:08:52.878821 4787 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/f84bfff4-b4f2-40d8-8b81-a9e5eb776442-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"f84bfff4-b4f2-40d8-8b81-a9e5eb776442\") " pod="nova-kuttl-default/rabbitmq-server-0" Jan 27 08:08:52 crc kubenswrapper[4787]: I0127 08:08:52.878835 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6bxjz\" (UniqueName: \"kubernetes.io/projected/f84bfff4-b4f2-40d8-8b81-a9e5eb776442-kube-api-access-6bxjz\") pod \"rabbitmq-server-0\" (UID: \"f84bfff4-b4f2-40d8-8b81-a9e5eb776442\") " pod="nova-kuttl-default/rabbitmq-server-0" Jan 27 08:08:52 crc kubenswrapper[4787]: I0127 08:08:52.878861 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-9a6dbd2e-1018-4ddf-90f4-9f08dd92aff2\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-9a6dbd2e-1018-4ddf-90f4-9f08dd92aff2\") pod \"rabbitmq-server-0\" (UID: \"f84bfff4-b4f2-40d8-8b81-a9e5eb776442\") " pod="nova-kuttl-default/rabbitmq-server-0" Jan 27 08:08:52 crc kubenswrapper[4787]: I0127 08:08:52.878892 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/f84bfff4-b4f2-40d8-8b81-a9e5eb776442-pod-info\") pod \"rabbitmq-server-0\" (UID: \"f84bfff4-b4f2-40d8-8b81-a9e5eb776442\") " pod="nova-kuttl-default/rabbitmq-server-0" Jan 27 08:08:52 crc kubenswrapper[4787]: I0127 08:08:52.878912 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/f84bfff4-b4f2-40d8-8b81-a9e5eb776442-server-conf\") pod \"rabbitmq-server-0\" (UID: \"f84bfff4-b4f2-40d8-8b81-a9e5eb776442\") " pod="nova-kuttl-default/rabbitmq-server-0" Jan 27 08:08:52 crc kubenswrapper[4787]: I0127 08:08:52.878937 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/f84bfff4-b4f2-40d8-8b81-a9e5eb776442-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"f84bfff4-b4f2-40d8-8b81-a9e5eb776442\") " pod="nova-kuttl-default/rabbitmq-server-0" Jan 27 08:08:52 crc kubenswrapper[4787]: I0127 08:08:52.980049 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/f84bfff4-b4f2-40d8-8b81-a9e5eb776442-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"f84bfff4-b4f2-40d8-8b81-a9e5eb776442\") " pod="nova-kuttl-default/rabbitmq-server-0" Jan 27 08:08:52 crc kubenswrapper[4787]: I0127 08:08:52.980101 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/f84bfff4-b4f2-40d8-8b81-a9e5eb776442-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"f84bfff4-b4f2-40d8-8b81-a9e5eb776442\") " pod="nova-kuttl-default/rabbitmq-server-0" Jan 27 08:08:52 crc kubenswrapper[4787]: I0127 08:08:52.980134 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/f84bfff4-b4f2-40d8-8b81-a9e5eb776442-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"f84bfff4-b4f2-40d8-8b81-a9e5eb776442\") " pod="nova-kuttl-default/rabbitmq-server-0" Jan 27 08:08:52 crc kubenswrapper[4787]: I0127 08:08:52.980149 4787 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6bxjz\" (UniqueName: \"kubernetes.io/projected/f84bfff4-b4f2-40d8-8b81-a9e5eb776442-kube-api-access-6bxjz\") pod \"rabbitmq-server-0\" (UID: \"f84bfff4-b4f2-40d8-8b81-a9e5eb776442\") " pod="nova-kuttl-default/rabbitmq-server-0" Jan 27 08:08:52 crc kubenswrapper[4787]: I0127 08:08:52.980180 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-9a6dbd2e-1018-4ddf-90f4-9f08dd92aff2\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-9a6dbd2e-1018-4ddf-90f4-9f08dd92aff2\") pod \"rabbitmq-server-0\" (UID: \"f84bfff4-b4f2-40d8-8b81-a9e5eb776442\") " pod="nova-kuttl-default/rabbitmq-server-0" Jan 27 08:08:52 crc kubenswrapper[4787]: I0127 08:08:52.980207 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/f84bfff4-b4f2-40d8-8b81-a9e5eb776442-pod-info\") pod \"rabbitmq-server-0\" (UID: \"f84bfff4-b4f2-40d8-8b81-a9e5eb776442\") " pod="nova-kuttl-default/rabbitmq-server-0" Jan 27 08:08:52 crc kubenswrapper[4787]: I0127 08:08:52.980228 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/f84bfff4-b4f2-40d8-8b81-a9e5eb776442-server-conf\") pod \"rabbitmq-server-0\" (UID: \"f84bfff4-b4f2-40d8-8b81-a9e5eb776442\") " pod="nova-kuttl-default/rabbitmq-server-0" Jan 27 08:08:52 crc kubenswrapper[4787]: I0127 08:08:52.980253 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/f84bfff4-b4f2-40d8-8b81-a9e5eb776442-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"f84bfff4-b4f2-40d8-8b81-a9e5eb776442\") " pod="nova-kuttl-default/rabbitmq-server-0" Jan 27 08:08:52 crc kubenswrapper[4787]: I0127 08:08:52.980295 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/f84bfff4-b4f2-40d8-8b81-a9e5eb776442-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"f84bfff4-b4f2-40d8-8b81-a9e5eb776442\") " pod="nova-kuttl-default/rabbitmq-server-0" Jan 27 08:08:52 crc kubenswrapper[4787]: I0127 08:08:52.981503 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/f84bfff4-b4f2-40d8-8b81-a9e5eb776442-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"f84bfff4-b4f2-40d8-8b81-a9e5eb776442\") " pod="nova-kuttl-default/rabbitmq-server-0" Jan 27 08:08:52 crc kubenswrapper[4787]: I0127 08:08:52.981771 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/f84bfff4-b4f2-40d8-8b81-a9e5eb776442-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"f84bfff4-b4f2-40d8-8b81-a9e5eb776442\") " pod="nova-kuttl-default/rabbitmq-server-0" Jan 27 08:08:52 crc kubenswrapper[4787]: I0127 08:08:52.982030 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/f84bfff4-b4f2-40d8-8b81-a9e5eb776442-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"f84bfff4-b4f2-40d8-8b81-a9e5eb776442\") " pod="nova-kuttl-default/rabbitmq-server-0" Jan 27 08:08:52 crc kubenswrapper[4787]: I0127 08:08:52.982431 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: 
\"kubernetes.io/configmap/f84bfff4-b4f2-40d8-8b81-a9e5eb776442-server-conf\") pod \"rabbitmq-server-0\" (UID: \"f84bfff4-b4f2-40d8-8b81-a9e5eb776442\") " pod="nova-kuttl-default/rabbitmq-server-0" Jan 27 08:08:52 crc kubenswrapper[4787]: I0127 08:08:52.987581 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/f84bfff4-b4f2-40d8-8b81-a9e5eb776442-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"f84bfff4-b4f2-40d8-8b81-a9e5eb776442\") " pod="nova-kuttl-default/rabbitmq-server-0" Jan 27 08:08:52 crc kubenswrapper[4787]: I0127 08:08:52.987685 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/f84bfff4-b4f2-40d8-8b81-a9e5eb776442-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"f84bfff4-b4f2-40d8-8b81-a9e5eb776442\") " pod="nova-kuttl-default/rabbitmq-server-0" Jan 27 08:08:52 crc kubenswrapper[4787]: I0127 08:08:52.995734 4787 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Jan 27 08:08:52 crc kubenswrapper[4787]: I0127 08:08:52.995788 4787 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-9a6dbd2e-1018-4ddf-90f4-9f08dd92aff2\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-9a6dbd2e-1018-4ddf-90f4-9f08dd92aff2\") pod \"rabbitmq-server-0\" (UID: \"f84bfff4-b4f2-40d8-8b81-a9e5eb776442\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/47535e156a7e386aa4a04a5f1bf6b12c634363205396f5d1030723c8fdd236ad/globalmount\"" pod="nova-kuttl-default/rabbitmq-server-0" Jan 27 08:08:53 crc kubenswrapper[4787]: I0127 08:08:53.004478 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6bxjz\" (UniqueName: \"kubernetes.io/projected/f84bfff4-b4f2-40d8-8b81-a9e5eb776442-kube-api-access-6bxjz\") pod \"rabbitmq-server-0\" (UID: \"f84bfff4-b4f2-40d8-8b81-a9e5eb776442\") " pod="nova-kuttl-default/rabbitmq-server-0" Jan 27 08:08:53 crc kubenswrapper[4787]: I0127 08:08:53.007468 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/f84bfff4-b4f2-40d8-8b81-a9e5eb776442-pod-info\") pod \"rabbitmq-server-0\" (UID: \"f84bfff4-b4f2-40d8-8b81-a9e5eb776442\") " pod="nova-kuttl-default/rabbitmq-server-0" Jan 27 08:08:53 crc kubenswrapper[4787]: I0127 08:08:53.024840 4787 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["nova-kuttl-default/rabbitmq-broadcaster-server-0"] Jan 27 08:08:53 crc kubenswrapper[4787]: I0127 08:08:53.025839 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-9a6dbd2e-1018-4ddf-90f4-9f08dd92aff2\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-9a6dbd2e-1018-4ddf-90f4-9f08dd92aff2\") pod \"rabbitmq-server-0\" (UID: \"f84bfff4-b4f2-40d8-8b81-a9e5eb776442\") " pod="nova-kuttl-default/rabbitmq-server-0" Jan 27 08:08:53 crc kubenswrapper[4787]: I0127 08:08:53.030417 4787 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="nova-kuttl-default/rabbitmq-broadcaster-server-0" Jan 27 08:08:53 crc kubenswrapper[4787]: I0127 08:08:53.036169 4787 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"rabbitmq-broadcaster-default-user" Jan 27 08:08:53 crc kubenswrapper[4787]: I0127 08:08:53.036377 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"nova-kuttl-default"/"rabbitmq-broadcaster-server-conf" Jan 27 08:08:53 crc kubenswrapper[4787]: I0127 08:08:53.036532 4787 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"rabbitmq-broadcaster-server-dockercfg-w4jtp" Jan 27 08:08:53 crc kubenswrapper[4787]: I0127 08:08:53.036564 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"nova-kuttl-default"/"rabbitmq-broadcaster-plugins-conf" Jan 27 08:08:53 crc kubenswrapper[4787]: I0127 08:08:53.041622 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/rabbitmq-broadcaster-server-0"] Jan 27 08:08:53 crc kubenswrapper[4787]: I0127 08:08:53.044088 4787 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"rabbitmq-broadcaster-erlang-cookie" Jan 27 08:08:53 crc kubenswrapper[4787]: I0127 08:08:53.091870 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/rabbitmq-server-0" Jan 27 08:08:53 crc kubenswrapper[4787]: I0127 08:08:53.194509 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/68be2504-1de8-427c-9937-bd0428f7c5c4-erlang-cookie-secret\") pod \"rabbitmq-broadcaster-server-0\" (UID: \"68be2504-1de8-427c-9937-bd0428f7c5c4\") " pod="nova-kuttl-default/rabbitmq-broadcaster-server-0" Jan 27 08:08:53 crc kubenswrapper[4787]: I0127 08:08:53.195043 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/68be2504-1de8-427c-9937-bd0428f7c5c4-rabbitmq-plugins\") pod \"rabbitmq-broadcaster-server-0\" (UID: \"68be2504-1de8-427c-9937-bd0428f7c5c4\") " pod="nova-kuttl-default/rabbitmq-broadcaster-server-0" Jan 27 08:08:53 crc kubenswrapper[4787]: I0127 08:08:53.195069 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/68be2504-1de8-427c-9937-bd0428f7c5c4-pod-info\") pod \"rabbitmq-broadcaster-server-0\" (UID: \"68be2504-1de8-427c-9937-bd0428f7c5c4\") " pod="nova-kuttl-default/rabbitmq-broadcaster-server-0" Jan 27 08:08:53 crc kubenswrapper[4787]: I0127 08:08:53.195098 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/68be2504-1de8-427c-9937-bd0428f7c5c4-server-conf\") pod \"rabbitmq-broadcaster-server-0\" (UID: \"68be2504-1de8-427c-9937-bd0428f7c5c4\") " pod="nova-kuttl-default/rabbitmq-broadcaster-server-0" Jan 27 08:08:53 crc kubenswrapper[4787]: I0127 08:08:53.195146 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fmzmt\" (UniqueName: \"kubernetes.io/projected/68be2504-1de8-427c-9937-bd0428f7c5c4-kube-api-access-fmzmt\") pod \"rabbitmq-broadcaster-server-0\" (UID: \"68be2504-1de8-427c-9937-bd0428f7c5c4\") " pod="nova-kuttl-default/rabbitmq-broadcaster-server-0" Jan 27 08:08:53 crc kubenswrapper[4787]: I0127 
08:08:53.195480 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/68be2504-1de8-427c-9937-bd0428f7c5c4-rabbitmq-confd\") pod \"rabbitmq-broadcaster-server-0\" (UID: \"68be2504-1de8-427c-9937-bd0428f7c5c4\") " pod="nova-kuttl-default/rabbitmq-broadcaster-server-0" Jan 27 08:08:53 crc kubenswrapper[4787]: I0127 08:08:53.195870 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-6f32bada-121e-4882-a6c8-085f9814aca8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-6f32bada-121e-4882-a6c8-085f9814aca8\") pod \"rabbitmq-broadcaster-server-0\" (UID: \"68be2504-1de8-427c-9937-bd0428f7c5c4\") " pod="nova-kuttl-default/rabbitmq-broadcaster-server-0" Jan 27 08:08:53 crc kubenswrapper[4787]: I0127 08:08:53.195919 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/68be2504-1de8-427c-9937-bd0428f7c5c4-plugins-conf\") pod \"rabbitmq-broadcaster-server-0\" (UID: \"68be2504-1de8-427c-9937-bd0428f7c5c4\") " pod="nova-kuttl-default/rabbitmq-broadcaster-server-0" Jan 27 08:08:53 crc kubenswrapper[4787]: I0127 08:08:53.195959 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/68be2504-1de8-427c-9937-bd0428f7c5c4-rabbitmq-erlang-cookie\") pod \"rabbitmq-broadcaster-server-0\" (UID: \"68be2504-1de8-427c-9937-bd0428f7c5c4\") " pod="nova-kuttl-default/rabbitmq-broadcaster-server-0" Jan 27 08:08:53 crc kubenswrapper[4787]: I0127 08:08:53.297379 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fmzmt\" (UniqueName: \"kubernetes.io/projected/68be2504-1de8-427c-9937-bd0428f7c5c4-kube-api-access-fmzmt\") pod \"rabbitmq-broadcaster-server-0\" (UID: \"68be2504-1de8-427c-9937-bd0428f7c5c4\") " pod="nova-kuttl-default/rabbitmq-broadcaster-server-0" Jan 27 08:08:53 crc kubenswrapper[4787]: I0127 08:08:53.297425 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/68be2504-1de8-427c-9937-bd0428f7c5c4-rabbitmq-confd\") pod \"rabbitmq-broadcaster-server-0\" (UID: \"68be2504-1de8-427c-9937-bd0428f7c5c4\") " pod="nova-kuttl-default/rabbitmq-broadcaster-server-0" Jan 27 08:08:53 crc kubenswrapper[4787]: I0127 08:08:53.297497 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-6f32bada-121e-4882-a6c8-085f9814aca8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-6f32bada-121e-4882-a6c8-085f9814aca8\") pod \"rabbitmq-broadcaster-server-0\" (UID: \"68be2504-1de8-427c-9937-bd0428f7c5c4\") " pod="nova-kuttl-default/rabbitmq-broadcaster-server-0" Jan 27 08:08:53 crc kubenswrapper[4787]: I0127 08:08:53.297525 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/68be2504-1de8-427c-9937-bd0428f7c5c4-plugins-conf\") pod \"rabbitmq-broadcaster-server-0\" (UID: \"68be2504-1de8-427c-9937-bd0428f7c5c4\") " pod="nova-kuttl-default/rabbitmq-broadcaster-server-0" Jan 27 08:08:53 crc kubenswrapper[4787]: I0127 08:08:53.297567 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: 
\"kubernetes.io/empty-dir/68be2504-1de8-427c-9937-bd0428f7c5c4-rabbitmq-erlang-cookie\") pod \"rabbitmq-broadcaster-server-0\" (UID: \"68be2504-1de8-427c-9937-bd0428f7c5c4\") " pod="nova-kuttl-default/rabbitmq-broadcaster-server-0" Jan 27 08:08:53 crc kubenswrapper[4787]: I0127 08:08:53.297590 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/68be2504-1de8-427c-9937-bd0428f7c5c4-erlang-cookie-secret\") pod \"rabbitmq-broadcaster-server-0\" (UID: \"68be2504-1de8-427c-9937-bd0428f7c5c4\") " pod="nova-kuttl-default/rabbitmq-broadcaster-server-0" Jan 27 08:08:53 crc kubenswrapper[4787]: I0127 08:08:53.297612 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/68be2504-1de8-427c-9937-bd0428f7c5c4-rabbitmq-plugins\") pod \"rabbitmq-broadcaster-server-0\" (UID: \"68be2504-1de8-427c-9937-bd0428f7c5c4\") " pod="nova-kuttl-default/rabbitmq-broadcaster-server-0" Jan 27 08:08:53 crc kubenswrapper[4787]: I0127 08:08:53.297632 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/68be2504-1de8-427c-9937-bd0428f7c5c4-pod-info\") pod \"rabbitmq-broadcaster-server-0\" (UID: \"68be2504-1de8-427c-9937-bd0428f7c5c4\") " pod="nova-kuttl-default/rabbitmq-broadcaster-server-0" Jan 27 08:08:53 crc kubenswrapper[4787]: I0127 08:08:53.297653 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/68be2504-1de8-427c-9937-bd0428f7c5c4-server-conf\") pod \"rabbitmq-broadcaster-server-0\" (UID: \"68be2504-1de8-427c-9937-bd0428f7c5c4\") " pod="nova-kuttl-default/rabbitmq-broadcaster-server-0" Jan 27 08:08:53 crc kubenswrapper[4787]: I0127 08:08:53.299018 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/68be2504-1de8-427c-9937-bd0428f7c5c4-rabbitmq-plugins\") pod \"rabbitmq-broadcaster-server-0\" (UID: \"68be2504-1de8-427c-9937-bd0428f7c5c4\") " pod="nova-kuttl-default/rabbitmq-broadcaster-server-0" Jan 27 08:08:53 crc kubenswrapper[4787]: I0127 08:08:53.299383 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/68be2504-1de8-427c-9937-bd0428f7c5c4-server-conf\") pod \"rabbitmq-broadcaster-server-0\" (UID: \"68be2504-1de8-427c-9937-bd0428f7c5c4\") " pod="nova-kuttl-default/rabbitmq-broadcaster-server-0" Jan 27 08:08:53 crc kubenswrapper[4787]: I0127 08:08:53.299967 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/68be2504-1de8-427c-9937-bd0428f7c5c4-rabbitmq-erlang-cookie\") pod \"rabbitmq-broadcaster-server-0\" (UID: \"68be2504-1de8-427c-9937-bd0428f7c5c4\") " pod="nova-kuttl-default/rabbitmq-broadcaster-server-0" Jan 27 08:08:53 crc kubenswrapper[4787]: I0127 08:08:53.300058 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/68be2504-1de8-427c-9937-bd0428f7c5c4-plugins-conf\") pod \"rabbitmq-broadcaster-server-0\" (UID: \"68be2504-1de8-427c-9937-bd0428f7c5c4\") " pod="nova-kuttl-default/rabbitmq-broadcaster-server-0" Jan 27 08:08:53 crc kubenswrapper[4787]: I0127 08:08:53.305236 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/68be2504-1de8-427c-9937-bd0428f7c5c4-erlang-cookie-secret\") pod \"rabbitmq-broadcaster-server-0\" (UID: \"68be2504-1de8-427c-9937-bd0428f7c5c4\") " pod="nova-kuttl-default/rabbitmq-broadcaster-server-0" Jan 27 08:08:53 crc kubenswrapper[4787]: I0127 08:08:53.305308 4787 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Jan 27 08:08:53 crc kubenswrapper[4787]: I0127 08:08:53.305333 4787 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-6f32bada-121e-4882-a6c8-085f9814aca8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-6f32bada-121e-4882-a6c8-085f9814aca8\") pod \"rabbitmq-broadcaster-server-0\" (UID: \"68be2504-1de8-427c-9937-bd0428f7c5c4\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/13f5f52254cfc6354b5820b49c0e7af456791e629d5b614bbb3f5fbac1a0f064/globalmount\"" pod="nova-kuttl-default/rabbitmq-broadcaster-server-0" Jan 27 08:08:53 crc kubenswrapper[4787]: I0127 08:08:53.306138 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/68be2504-1de8-427c-9937-bd0428f7c5c4-pod-info\") pod \"rabbitmq-broadcaster-server-0\" (UID: \"68be2504-1de8-427c-9937-bd0428f7c5c4\") " pod="nova-kuttl-default/rabbitmq-broadcaster-server-0" Jan 27 08:08:53 crc kubenswrapper[4787]: I0127 08:08:53.320640 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/68be2504-1de8-427c-9937-bd0428f7c5c4-rabbitmq-confd\") pod \"rabbitmq-broadcaster-server-0\" (UID: \"68be2504-1de8-427c-9937-bd0428f7c5c4\") " pod="nova-kuttl-default/rabbitmq-broadcaster-server-0" Jan 27 08:08:53 crc kubenswrapper[4787]: I0127 08:08:53.328040 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fmzmt\" (UniqueName: \"kubernetes.io/projected/68be2504-1de8-427c-9937-bd0428f7c5c4-kube-api-access-fmzmt\") pod \"rabbitmq-broadcaster-server-0\" (UID: \"68be2504-1de8-427c-9937-bd0428f7c5c4\") " pod="nova-kuttl-default/rabbitmq-broadcaster-server-0" Jan 27 08:08:53 crc kubenswrapper[4787]: I0127 08:08:53.357092 4787 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["nova-kuttl-default/rabbitmq-cell1-server-0"] Jan 27 08:08:53 crc kubenswrapper[4787]: I0127 08:08:53.359388 4787 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="nova-kuttl-default/rabbitmq-cell1-server-0" Jan 27 08:08:53 crc kubenswrapper[4787]: I0127 08:08:53.365077 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/rabbitmq-cell1-server-0"] Jan 27 08:08:53 crc kubenswrapper[4787]: I0127 08:08:53.366380 4787 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"rabbitmq-cell1-erlang-cookie" Jan 27 08:08:53 crc kubenswrapper[4787]: I0127 08:08:53.366601 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"nova-kuttl-default"/"rabbitmq-cell1-plugins-conf" Jan 27 08:08:53 crc kubenswrapper[4787]: I0127 08:08:53.367151 4787 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"rabbitmq-cell1-server-dockercfg-f2rj8" Jan 27 08:08:53 crc kubenswrapper[4787]: I0127 08:08:53.368385 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"nova-kuttl-default"/"rabbitmq-cell1-server-conf" Jan 27 08:08:53 crc kubenswrapper[4787]: I0127 08:08:53.368734 4787 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"rabbitmq-cell1-default-user" Jan 27 08:08:53 crc kubenswrapper[4787]: I0127 08:08:53.371877 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-6f32bada-121e-4882-a6c8-085f9814aca8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-6f32bada-121e-4882-a6c8-085f9814aca8\") pod \"rabbitmq-broadcaster-server-0\" (UID: \"68be2504-1de8-427c-9937-bd0428f7c5c4\") " pod="nova-kuttl-default/rabbitmq-broadcaster-server-0" Jan 27 08:08:53 crc kubenswrapper[4787]: I0127 08:08:53.382622 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/rabbitmq-broadcaster-server-0" Jan 27 08:08:53 crc kubenswrapper[4787]: I0127 08:08:53.500619 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/faff0a15-880a-4cf7-a0e0-81d573ace274-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"faff0a15-880a-4cf7-a0e0-81d573ace274\") " pod="nova-kuttl-default/rabbitmq-cell1-server-0" Jan 27 08:08:53 crc kubenswrapper[4787]: I0127 08:08:53.500669 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4pl46\" (UniqueName: \"kubernetes.io/projected/faff0a15-880a-4cf7-a0e0-81d573ace274-kube-api-access-4pl46\") pod \"rabbitmq-cell1-server-0\" (UID: \"faff0a15-880a-4cf7-a0e0-81d573ace274\") " pod="nova-kuttl-default/rabbitmq-cell1-server-0" Jan 27 08:08:53 crc kubenswrapper[4787]: I0127 08:08:53.500695 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/faff0a15-880a-4cf7-a0e0-81d573ace274-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"faff0a15-880a-4cf7-a0e0-81d573ace274\") " pod="nova-kuttl-default/rabbitmq-cell1-server-0" Jan 27 08:08:53 crc kubenswrapper[4787]: I0127 08:08:53.500736 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/faff0a15-880a-4cf7-a0e0-81d573ace274-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"faff0a15-880a-4cf7-a0e0-81d573ace274\") " pod="nova-kuttl-default/rabbitmq-cell1-server-0" Jan 27 08:08:53 crc kubenswrapper[4787]: I0127 08:08:53.500829 4787 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/faff0a15-880a-4cf7-a0e0-81d573ace274-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"faff0a15-880a-4cf7-a0e0-81d573ace274\") " pod="nova-kuttl-default/rabbitmq-cell1-server-0" Jan 27 08:08:53 crc kubenswrapper[4787]: I0127 08:08:53.501116 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/faff0a15-880a-4cf7-a0e0-81d573ace274-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"faff0a15-880a-4cf7-a0e0-81d573ace274\") " pod="nova-kuttl-default/rabbitmq-cell1-server-0" Jan 27 08:08:53 crc kubenswrapper[4787]: I0127 08:08:53.501184 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-9be3783e-45f7-4421-9e4f-1bba804afd6b\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-9be3783e-45f7-4421-9e4f-1bba804afd6b\") pod \"rabbitmq-cell1-server-0\" (UID: \"faff0a15-880a-4cf7-a0e0-81d573ace274\") " pod="nova-kuttl-default/rabbitmq-cell1-server-0" Jan 27 08:08:53 crc kubenswrapper[4787]: I0127 08:08:53.501346 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/faff0a15-880a-4cf7-a0e0-81d573ace274-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"faff0a15-880a-4cf7-a0e0-81d573ace274\") " pod="nova-kuttl-default/rabbitmq-cell1-server-0" Jan 27 08:08:53 crc kubenswrapper[4787]: I0127 08:08:53.501382 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/faff0a15-880a-4cf7-a0e0-81d573ace274-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"faff0a15-880a-4cf7-a0e0-81d573ace274\") " pod="nova-kuttl-default/rabbitmq-cell1-server-0" Jan 27 08:08:53 crc kubenswrapper[4787]: I0127 08:08:53.541061 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/rabbitmq-server-0"] Jan 27 08:08:53 crc kubenswrapper[4787]: I0127 08:08:53.602686 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/faff0a15-880a-4cf7-a0e0-81d573ace274-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"faff0a15-880a-4cf7-a0e0-81d573ace274\") " pod="nova-kuttl-default/rabbitmq-cell1-server-0" Jan 27 08:08:53 crc kubenswrapper[4787]: I0127 08:08:53.603286 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/faff0a15-880a-4cf7-a0e0-81d573ace274-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"faff0a15-880a-4cf7-a0e0-81d573ace274\") " pod="nova-kuttl-default/rabbitmq-cell1-server-0" Jan 27 08:08:53 crc kubenswrapper[4787]: I0127 08:08:53.603313 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/faff0a15-880a-4cf7-a0e0-81d573ace274-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"faff0a15-880a-4cf7-a0e0-81d573ace274\") " pod="nova-kuttl-default/rabbitmq-cell1-server-0" Jan 27 08:08:53 crc kubenswrapper[4787]: I0127 08:08:53.603376 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: 
\"kubernetes.io/projected/faff0a15-880a-4cf7-a0e0-81d573ace274-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"faff0a15-880a-4cf7-a0e0-81d573ace274\") " pod="nova-kuttl-default/rabbitmq-cell1-server-0" Jan 27 08:08:53 crc kubenswrapper[4787]: I0127 08:08:53.603424 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-9be3783e-45f7-4421-9e4f-1bba804afd6b\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-9be3783e-45f7-4421-9e4f-1bba804afd6b\") pod \"rabbitmq-cell1-server-0\" (UID: \"faff0a15-880a-4cf7-a0e0-81d573ace274\") " pod="nova-kuttl-default/rabbitmq-cell1-server-0" Jan 27 08:08:53 crc kubenswrapper[4787]: I0127 08:08:53.603479 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/faff0a15-880a-4cf7-a0e0-81d573ace274-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"faff0a15-880a-4cf7-a0e0-81d573ace274\") " pod="nova-kuttl-default/rabbitmq-cell1-server-0" Jan 27 08:08:53 crc kubenswrapper[4787]: I0127 08:08:53.603511 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/faff0a15-880a-4cf7-a0e0-81d573ace274-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"faff0a15-880a-4cf7-a0e0-81d573ace274\") " pod="nova-kuttl-default/rabbitmq-cell1-server-0" Jan 27 08:08:53 crc kubenswrapper[4787]: I0127 08:08:53.603645 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/faff0a15-880a-4cf7-a0e0-81d573ace274-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"faff0a15-880a-4cf7-a0e0-81d573ace274\") " pod="nova-kuttl-default/rabbitmq-cell1-server-0" Jan 27 08:08:53 crc kubenswrapper[4787]: I0127 08:08:53.603683 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4pl46\" (UniqueName: \"kubernetes.io/projected/faff0a15-880a-4cf7-a0e0-81d573ace274-kube-api-access-4pl46\") pod \"rabbitmq-cell1-server-0\" (UID: \"faff0a15-880a-4cf7-a0e0-81d573ace274\") " pod="nova-kuttl-default/rabbitmq-cell1-server-0" Jan 27 08:08:53 crc kubenswrapper[4787]: I0127 08:08:53.604296 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/faff0a15-880a-4cf7-a0e0-81d573ace274-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"faff0a15-880a-4cf7-a0e0-81d573ace274\") " pod="nova-kuttl-default/rabbitmq-cell1-server-0" Jan 27 08:08:53 crc kubenswrapper[4787]: I0127 08:08:53.604888 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/faff0a15-880a-4cf7-a0e0-81d573ace274-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"faff0a15-880a-4cf7-a0e0-81d573ace274\") " pod="nova-kuttl-default/rabbitmq-cell1-server-0" Jan 27 08:08:53 crc kubenswrapper[4787]: I0127 08:08:53.605000 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/faff0a15-880a-4cf7-a0e0-81d573ace274-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"faff0a15-880a-4cf7-a0e0-81d573ace274\") " pod="nova-kuttl-default/rabbitmq-cell1-server-0" Jan 27 08:08:53 crc kubenswrapper[4787]: I0127 08:08:53.605289 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: 
\"kubernetes.io/configmap/faff0a15-880a-4cf7-a0e0-81d573ace274-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"faff0a15-880a-4cf7-a0e0-81d573ace274\") " pod="nova-kuttl-default/rabbitmq-cell1-server-0" Jan 27 08:08:53 crc kubenswrapper[4787]: I0127 08:08:53.607016 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/faff0a15-880a-4cf7-a0e0-81d573ace274-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"faff0a15-880a-4cf7-a0e0-81d573ace274\") " pod="nova-kuttl-default/rabbitmq-cell1-server-0" Jan 27 08:08:53 crc kubenswrapper[4787]: I0127 08:08:53.608315 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/faff0a15-880a-4cf7-a0e0-81d573ace274-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"faff0a15-880a-4cf7-a0e0-81d573ace274\") " pod="nova-kuttl-default/rabbitmq-cell1-server-0" Jan 27 08:08:53 crc kubenswrapper[4787]: I0127 08:08:53.615243 4787 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Jan 27 08:08:53 crc kubenswrapper[4787]: I0127 08:08:53.615287 4787 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-9be3783e-45f7-4421-9e4f-1bba804afd6b\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-9be3783e-45f7-4421-9e4f-1bba804afd6b\") pod \"rabbitmq-cell1-server-0\" (UID: \"faff0a15-880a-4cf7-a0e0-81d573ace274\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/4938bfb1fada77ff175a504d95a3353fd8acb06803a7715257215a5ef30da24a/globalmount\"" pod="nova-kuttl-default/rabbitmq-cell1-server-0" Jan 27 08:08:53 crc kubenswrapper[4787]: I0127 08:08:53.616516 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/faff0a15-880a-4cf7-a0e0-81d573ace274-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"faff0a15-880a-4cf7-a0e0-81d573ace274\") " pod="nova-kuttl-default/rabbitmq-cell1-server-0" Jan 27 08:08:53 crc kubenswrapper[4787]: I0127 08:08:53.631814 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4pl46\" (UniqueName: \"kubernetes.io/projected/faff0a15-880a-4cf7-a0e0-81d573ace274-kube-api-access-4pl46\") pod \"rabbitmq-cell1-server-0\" (UID: \"faff0a15-880a-4cf7-a0e0-81d573ace274\") " pod="nova-kuttl-default/rabbitmq-cell1-server-0" Jan 27 08:08:53 crc kubenswrapper[4787]: I0127 08:08:53.646586 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/rabbitmq-broadcaster-server-0"] Jan 27 08:08:53 crc kubenswrapper[4787]: I0127 08:08:53.669087 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-9be3783e-45f7-4421-9e4f-1bba804afd6b\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-9be3783e-45f7-4421-9e4f-1bba804afd6b\") pod \"rabbitmq-cell1-server-0\" (UID: \"faff0a15-880a-4cf7-a0e0-81d573ace274\") " pod="nova-kuttl-default/rabbitmq-cell1-server-0" Jan 27 08:08:53 crc kubenswrapper[4787]: I0127 08:08:53.689284 4787 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="nova-kuttl-default/rabbitmq-cell1-server-0" Jan 27 08:08:53 crc kubenswrapper[4787]: I0127 08:08:53.701345 4787 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["nova-kuttl-default/rabbitmq-notifications-server-0"] Jan 27 08:08:53 crc kubenswrapper[4787]: I0127 08:08:53.702577 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/rabbitmq-notifications-server-0" Jan 27 08:08:53 crc kubenswrapper[4787]: I0127 08:08:53.707340 4787 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"rabbitmq-notifications-default-user" Jan 27 08:08:53 crc kubenswrapper[4787]: I0127 08:08:53.707543 4787 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"rabbitmq-notifications-erlang-cookie" Jan 27 08:08:53 crc kubenswrapper[4787]: I0127 08:08:53.709249 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"nova-kuttl-default"/"rabbitmq-notifications-server-conf" Jan 27 08:08:53 crc kubenswrapper[4787]: I0127 08:08:53.709525 4787 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"rabbitmq-notifications-server-dockercfg-95rc9" Jan 27 08:08:53 crc kubenswrapper[4787]: I0127 08:08:53.712944 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"nova-kuttl-default"/"rabbitmq-notifications-plugins-conf" Jan 27 08:08:53 crc kubenswrapper[4787]: I0127 08:08:53.727702 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/rabbitmq-notifications-server-0"] Jan 27 08:08:53 crc kubenswrapper[4787]: I0127 08:08:53.817749 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/1d9a6f95-2510-4e74-b4f7-fb592d761c91-rabbitmq-plugins\") pod \"rabbitmq-notifications-server-0\" (UID: \"1d9a6f95-2510-4e74-b4f7-fb592d761c91\") " pod="nova-kuttl-default/rabbitmq-notifications-server-0" Jan 27 08:08:53 crc kubenswrapper[4787]: I0127 08:08:53.817866 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/1d9a6f95-2510-4e74-b4f7-fb592d761c91-server-conf\") pod \"rabbitmq-notifications-server-0\" (UID: \"1d9a6f95-2510-4e74-b4f7-fb592d761c91\") " pod="nova-kuttl-default/rabbitmq-notifications-server-0" Jan 27 08:08:53 crc kubenswrapper[4787]: I0127 08:08:53.817983 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5fkdp\" (UniqueName: \"kubernetes.io/projected/1d9a6f95-2510-4e74-b4f7-fb592d761c91-kube-api-access-5fkdp\") pod \"rabbitmq-notifications-server-0\" (UID: \"1d9a6f95-2510-4e74-b4f7-fb592d761c91\") " pod="nova-kuttl-default/rabbitmq-notifications-server-0" Jan 27 08:08:53 crc kubenswrapper[4787]: I0127 08:08:53.818143 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/1d9a6f95-2510-4e74-b4f7-fb592d761c91-plugins-conf\") pod \"rabbitmq-notifications-server-0\" (UID: \"1d9a6f95-2510-4e74-b4f7-fb592d761c91\") " pod="nova-kuttl-default/rabbitmq-notifications-server-0" Jan 27 08:08:53 crc kubenswrapper[4787]: I0127 08:08:53.818177 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-f0f05e97-a20e-4f94-aba0-aaf49d5c6d61\" (UniqueName: 
\"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-f0f05e97-a20e-4f94-aba0-aaf49d5c6d61\") pod \"rabbitmq-notifications-server-0\" (UID: \"1d9a6f95-2510-4e74-b4f7-fb592d761c91\") " pod="nova-kuttl-default/rabbitmq-notifications-server-0" Jan 27 08:08:53 crc kubenswrapper[4787]: I0127 08:08:53.818211 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/1d9a6f95-2510-4e74-b4f7-fb592d761c91-pod-info\") pod \"rabbitmq-notifications-server-0\" (UID: \"1d9a6f95-2510-4e74-b4f7-fb592d761c91\") " pod="nova-kuttl-default/rabbitmq-notifications-server-0" Jan 27 08:08:53 crc kubenswrapper[4787]: I0127 08:08:53.818238 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/1d9a6f95-2510-4e74-b4f7-fb592d761c91-rabbitmq-erlang-cookie\") pod \"rabbitmq-notifications-server-0\" (UID: \"1d9a6f95-2510-4e74-b4f7-fb592d761c91\") " pod="nova-kuttl-default/rabbitmq-notifications-server-0" Jan 27 08:08:53 crc kubenswrapper[4787]: I0127 08:08:53.818255 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/1d9a6f95-2510-4e74-b4f7-fb592d761c91-rabbitmq-confd\") pod \"rabbitmq-notifications-server-0\" (UID: \"1d9a6f95-2510-4e74-b4f7-fb592d761c91\") " pod="nova-kuttl-default/rabbitmq-notifications-server-0" Jan 27 08:08:53 crc kubenswrapper[4787]: I0127 08:08:53.818278 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/1d9a6f95-2510-4e74-b4f7-fb592d761c91-erlang-cookie-secret\") pod \"rabbitmq-notifications-server-0\" (UID: \"1d9a6f95-2510-4e74-b4f7-fb592d761c91\") " pod="nova-kuttl-default/rabbitmq-notifications-server-0" Jan 27 08:08:53 crc kubenswrapper[4787]: I0127 08:08:53.919648 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5fkdp\" (UniqueName: \"kubernetes.io/projected/1d9a6f95-2510-4e74-b4f7-fb592d761c91-kube-api-access-5fkdp\") pod \"rabbitmq-notifications-server-0\" (UID: \"1d9a6f95-2510-4e74-b4f7-fb592d761c91\") " pod="nova-kuttl-default/rabbitmq-notifications-server-0" Jan 27 08:08:53 crc kubenswrapper[4787]: I0127 08:08:53.919701 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/1d9a6f95-2510-4e74-b4f7-fb592d761c91-plugins-conf\") pod \"rabbitmq-notifications-server-0\" (UID: \"1d9a6f95-2510-4e74-b4f7-fb592d761c91\") " pod="nova-kuttl-default/rabbitmq-notifications-server-0" Jan 27 08:08:53 crc kubenswrapper[4787]: I0127 08:08:53.919732 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-f0f05e97-a20e-4f94-aba0-aaf49d5c6d61\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-f0f05e97-a20e-4f94-aba0-aaf49d5c6d61\") pod \"rabbitmq-notifications-server-0\" (UID: \"1d9a6f95-2510-4e74-b4f7-fb592d761c91\") " pod="nova-kuttl-default/rabbitmq-notifications-server-0" Jan 27 08:08:53 crc kubenswrapper[4787]: I0127 08:08:53.919756 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/1d9a6f95-2510-4e74-b4f7-fb592d761c91-pod-info\") pod \"rabbitmq-notifications-server-0\" (UID: \"1d9a6f95-2510-4e74-b4f7-fb592d761c91\") " 
pod="nova-kuttl-default/rabbitmq-notifications-server-0" Jan 27 08:08:53 crc kubenswrapper[4787]: I0127 08:08:53.919779 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/1d9a6f95-2510-4e74-b4f7-fb592d761c91-rabbitmq-erlang-cookie\") pod \"rabbitmq-notifications-server-0\" (UID: \"1d9a6f95-2510-4e74-b4f7-fb592d761c91\") " pod="nova-kuttl-default/rabbitmq-notifications-server-0" Jan 27 08:08:53 crc kubenswrapper[4787]: I0127 08:08:53.919799 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/1d9a6f95-2510-4e74-b4f7-fb592d761c91-rabbitmq-confd\") pod \"rabbitmq-notifications-server-0\" (UID: \"1d9a6f95-2510-4e74-b4f7-fb592d761c91\") " pod="nova-kuttl-default/rabbitmq-notifications-server-0" Jan 27 08:08:53 crc kubenswrapper[4787]: I0127 08:08:53.919825 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/1d9a6f95-2510-4e74-b4f7-fb592d761c91-erlang-cookie-secret\") pod \"rabbitmq-notifications-server-0\" (UID: \"1d9a6f95-2510-4e74-b4f7-fb592d761c91\") " pod="nova-kuttl-default/rabbitmq-notifications-server-0" Jan 27 08:08:53 crc kubenswrapper[4787]: I0127 08:08:53.919846 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/1d9a6f95-2510-4e74-b4f7-fb592d761c91-rabbitmq-plugins\") pod \"rabbitmq-notifications-server-0\" (UID: \"1d9a6f95-2510-4e74-b4f7-fb592d761c91\") " pod="nova-kuttl-default/rabbitmq-notifications-server-0" Jan 27 08:08:53 crc kubenswrapper[4787]: I0127 08:08:53.919887 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/1d9a6f95-2510-4e74-b4f7-fb592d761c91-server-conf\") pod \"rabbitmq-notifications-server-0\" (UID: \"1d9a6f95-2510-4e74-b4f7-fb592d761c91\") " pod="nova-kuttl-default/rabbitmq-notifications-server-0" Jan 27 08:08:53 crc kubenswrapper[4787]: I0127 08:08:53.921225 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/1d9a6f95-2510-4e74-b4f7-fb592d761c91-rabbitmq-plugins\") pod \"rabbitmq-notifications-server-0\" (UID: \"1d9a6f95-2510-4e74-b4f7-fb592d761c91\") " pod="nova-kuttl-default/rabbitmq-notifications-server-0" Jan 27 08:08:53 crc kubenswrapper[4787]: I0127 08:08:53.921478 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/1d9a6f95-2510-4e74-b4f7-fb592d761c91-plugins-conf\") pod \"rabbitmq-notifications-server-0\" (UID: \"1d9a6f95-2510-4e74-b4f7-fb592d761c91\") " pod="nova-kuttl-default/rabbitmq-notifications-server-0" Jan 27 08:08:53 crc kubenswrapper[4787]: I0127 08:08:53.922279 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/1d9a6f95-2510-4e74-b4f7-fb592d761c91-rabbitmq-erlang-cookie\") pod \"rabbitmq-notifications-server-0\" (UID: \"1d9a6f95-2510-4e74-b4f7-fb592d761c91\") " pod="nova-kuttl-default/rabbitmq-notifications-server-0" Jan 27 08:08:53 crc kubenswrapper[4787]: I0127 08:08:53.922393 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/1d9a6f95-2510-4e74-b4f7-fb592d761c91-server-conf\") pod 
\"rabbitmq-notifications-server-0\" (UID: \"1d9a6f95-2510-4e74-b4f7-fb592d761c91\") " pod="nova-kuttl-default/rabbitmq-notifications-server-0" Jan 27 08:08:53 crc kubenswrapper[4787]: I0127 08:08:53.924074 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/1d9a6f95-2510-4e74-b4f7-fb592d761c91-pod-info\") pod \"rabbitmq-notifications-server-0\" (UID: \"1d9a6f95-2510-4e74-b4f7-fb592d761c91\") " pod="nova-kuttl-default/rabbitmq-notifications-server-0" Jan 27 08:08:53 crc kubenswrapper[4787]: I0127 08:08:53.924108 4787 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Jan 27 08:08:53 crc kubenswrapper[4787]: I0127 08:08:53.924136 4787 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-f0f05e97-a20e-4f94-aba0-aaf49d5c6d61\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-f0f05e97-a20e-4f94-aba0-aaf49d5c6d61\") pod \"rabbitmq-notifications-server-0\" (UID: \"1d9a6f95-2510-4e74-b4f7-fb592d761c91\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/5726dae6ba81680bf46e46cc2901243be22c80d297c0bbc37108d63d7f136958/globalmount\"" pod="nova-kuttl-default/rabbitmq-notifications-server-0" Jan 27 08:08:53 crc kubenswrapper[4787]: I0127 08:08:53.925747 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/1d9a6f95-2510-4e74-b4f7-fb592d761c91-rabbitmq-confd\") pod \"rabbitmq-notifications-server-0\" (UID: \"1d9a6f95-2510-4e74-b4f7-fb592d761c91\") " pod="nova-kuttl-default/rabbitmq-notifications-server-0" Jan 27 08:08:53 crc kubenswrapper[4787]: I0127 08:08:53.926837 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/1d9a6f95-2510-4e74-b4f7-fb592d761c91-erlang-cookie-secret\") pod \"rabbitmq-notifications-server-0\" (UID: \"1d9a6f95-2510-4e74-b4f7-fb592d761c91\") " pod="nova-kuttl-default/rabbitmq-notifications-server-0" Jan 27 08:08:53 crc kubenswrapper[4787]: I0127 08:08:53.944438 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5fkdp\" (UniqueName: \"kubernetes.io/projected/1d9a6f95-2510-4e74-b4f7-fb592d761c91-kube-api-access-5fkdp\") pod \"rabbitmq-notifications-server-0\" (UID: \"1d9a6f95-2510-4e74-b4f7-fb592d761c91\") " pod="nova-kuttl-default/rabbitmq-notifications-server-0" Jan 27 08:08:53 crc kubenswrapper[4787]: I0127 08:08:53.953418 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-f0f05e97-a20e-4f94-aba0-aaf49d5c6d61\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-f0f05e97-a20e-4f94-aba0-aaf49d5c6d61\") pod \"rabbitmq-notifications-server-0\" (UID: \"1d9a6f95-2510-4e74-b4f7-fb592d761c91\") " pod="nova-kuttl-default/rabbitmq-notifications-server-0" Jan 27 08:08:53 crc kubenswrapper[4787]: I0127 08:08:53.976075 4787 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["nova-kuttl-default/openstack-galera-0"] Jan 27 08:08:53 crc kubenswrapper[4787]: I0127 08:08:53.977677 4787 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="nova-kuttl-default/openstack-galera-0" Jan 27 08:08:53 crc kubenswrapper[4787]: I0127 08:08:53.984457 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"nova-kuttl-default"/"openstack-config-data" Jan 27 08:08:53 crc kubenswrapper[4787]: I0127 08:08:53.984694 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"nova-kuttl-default"/"openstack-scripts" Jan 27 08:08:53 crc kubenswrapper[4787]: I0127 08:08:53.984873 4787 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"cert-galera-openstack-svc" Jan 27 08:08:53 crc kubenswrapper[4787]: I0127 08:08:53.984948 4787 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"galera-openstack-dockercfg-4zpns" Jan 27 08:08:53 crc kubenswrapper[4787]: I0127 08:08:53.990788 4787 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"combined-ca-bundle" Jan 27 08:08:54 crc kubenswrapper[4787]: I0127 08:08:54.004915 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/openstack-galera-0"] Jan 27 08:08:54 crc kubenswrapper[4787]: I0127 08:08:54.035214 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/rabbitmq-notifications-server-0" Jan 27 08:08:54 crc kubenswrapper[4787]: I0127 08:08:54.129825 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4bsbt\" (UniqueName: \"kubernetes.io/projected/144491fe-49b9-4a76-8da2-db798cf6a1e4-kube-api-access-4bsbt\") pod \"openstack-galera-0\" (UID: \"144491fe-49b9-4a76-8da2-db798cf6a1e4\") " pod="nova-kuttl-default/openstack-galera-0" Jan 27 08:08:54 crc kubenswrapper[4787]: I0127 08:08:54.130119 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/144491fe-49b9-4a76-8da2-db798cf6a1e4-operator-scripts\") pod \"openstack-galera-0\" (UID: \"144491fe-49b9-4a76-8da2-db798cf6a1e4\") " pod="nova-kuttl-default/openstack-galera-0" Jan 27 08:08:54 crc kubenswrapper[4787]: I0127 08:08:54.130268 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/144491fe-49b9-4a76-8da2-db798cf6a1e4-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"144491fe-49b9-4a76-8da2-db798cf6a1e4\") " pod="nova-kuttl-default/openstack-galera-0" Jan 27 08:08:54 crc kubenswrapper[4787]: I0127 08:08:54.130404 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/144491fe-49b9-4a76-8da2-db798cf6a1e4-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"144491fe-49b9-4a76-8da2-db798cf6a1e4\") " pod="nova-kuttl-default/openstack-galera-0" Jan 27 08:08:54 crc kubenswrapper[4787]: I0127 08:08:54.130470 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-0c1e6e82-ab87-4992-97e4-a29fbbb75e54\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-0c1e6e82-ab87-4992-97e4-a29fbbb75e54\") pod \"openstack-galera-0\" (UID: \"144491fe-49b9-4a76-8da2-db798cf6a1e4\") " pod="nova-kuttl-default/openstack-galera-0" Jan 27 08:08:54 crc kubenswrapper[4787]: I0127 08:08:54.130643 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" 
(UniqueName: \"kubernetes.io/configmap/144491fe-49b9-4a76-8da2-db798cf6a1e4-kolla-config\") pod \"openstack-galera-0\" (UID: \"144491fe-49b9-4a76-8da2-db798cf6a1e4\") " pod="nova-kuttl-default/openstack-galera-0" Jan 27 08:08:54 crc kubenswrapper[4787]: I0127 08:08:54.130701 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/144491fe-49b9-4a76-8da2-db798cf6a1e4-config-data-generated\") pod \"openstack-galera-0\" (UID: \"144491fe-49b9-4a76-8da2-db798cf6a1e4\") " pod="nova-kuttl-default/openstack-galera-0" Jan 27 08:08:54 crc kubenswrapper[4787]: I0127 08:08:54.130748 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/144491fe-49b9-4a76-8da2-db798cf6a1e4-config-data-default\") pod \"openstack-galera-0\" (UID: \"144491fe-49b9-4a76-8da2-db798cf6a1e4\") " pod="nova-kuttl-default/openstack-galera-0" Jan 27 08:08:54 crc kubenswrapper[4787]: I0127 08:08:54.146938 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/rabbitmq-cell1-server-0"] Jan 27 08:08:54 crc kubenswrapper[4787]: W0127 08:08:54.164710 4787 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podfaff0a15_880a_4cf7_a0e0_81d573ace274.slice/crio-00921be760b5ce1051039b98d15ddc8ffa501aa8a39821ad9e6ae171814f6477 WatchSource:0}: Error finding container 00921be760b5ce1051039b98d15ddc8ffa501aa8a39821ad9e6ae171814f6477: Status 404 returned error can't find the container with id 00921be760b5ce1051039b98d15ddc8ffa501aa8a39821ad9e6ae171814f6477 Jan 27 08:08:54 crc kubenswrapper[4787]: I0127 08:08:54.238907 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/144491fe-49b9-4a76-8da2-db798cf6a1e4-operator-scripts\") pod \"openstack-galera-0\" (UID: \"144491fe-49b9-4a76-8da2-db798cf6a1e4\") " pod="nova-kuttl-default/openstack-galera-0" Jan 27 08:08:54 crc kubenswrapper[4787]: I0127 08:08:54.239003 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/144491fe-49b9-4a76-8da2-db798cf6a1e4-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"144491fe-49b9-4a76-8da2-db798cf6a1e4\") " pod="nova-kuttl-default/openstack-galera-0" Jan 27 08:08:54 crc kubenswrapper[4787]: I0127 08:08:54.239073 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/144491fe-49b9-4a76-8da2-db798cf6a1e4-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"144491fe-49b9-4a76-8da2-db798cf6a1e4\") " pod="nova-kuttl-default/openstack-galera-0" Jan 27 08:08:54 crc kubenswrapper[4787]: I0127 08:08:54.239103 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-0c1e6e82-ab87-4992-97e4-a29fbbb75e54\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-0c1e6e82-ab87-4992-97e4-a29fbbb75e54\") pod \"openstack-galera-0\" (UID: \"144491fe-49b9-4a76-8da2-db798cf6a1e4\") " pod="nova-kuttl-default/openstack-galera-0" Jan 27 08:08:54 crc kubenswrapper[4787]: I0127 08:08:54.239146 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/144491fe-49b9-4a76-8da2-db798cf6a1e4-kolla-config\") 
pod \"openstack-galera-0\" (UID: \"144491fe-49b9-4a76-8da2-db798cf6a1e4\") " pod="nova-kuttl-default/openstack-galera-0" Jan 27 08:08:54 crc kubenswrapper[4787]: I0127 08:08:54.239174 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/144491fe-49b9-4a76-8da2-db798cf6a1e4-config-data-generated\") pod \"openstack-galera-0\" (UID: \"144491fe-49b9-4a76-8da2-db798cf6a1e4\") " pod="nova-kuttl-default/openstack-galera-0" Jan 27 08:08:54 crc kubenswrapper[4787]: I0127 08:08:54.239195 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/144491fe-49b9-4a76-8da2-db798cf6a1e4-config-data-default\") pod \"openstack-galera-0\" (UID: \"144491fe-49b9-4a76-8da2-db798cf6a1e4\") " pod="nova-kuttl-default/openstack-galera-0" Jan 27 08:08:54 crc kubenswrapper[4787]: I0127 08:08:54.239223 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4bsbt\" (UniqueName: \"kubernetes.io/projected/144491fe-49b9-4a76-8da2-db798cf6a1e4-kube-api-access-4bsbt\") pod \"openstack-galera-0\" (UID: \"144491fe-49b9-4a76-8da2-db798cf6a1e4\") " pod="nova-kuttl-default/openstack-galera-0" Jan 27 08:08:54 crc kubenswrapper[4787]: I0127 08:08:54.252402 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/144491fe-49b9-4a76-8da2-db798cf6a1e4-operator-scripts\") pod \"openstack-galera-0\" (UID: \"144491fe-49b9-4a76-8da2-db798cf6a1e4\") " pod="nova-kuttl-default/openstack-galera-0" Jan 27 08:08:54 crc kubenswrapper[4787]: I0127 08:08:54.252420 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/144491fe-49b9-4a76-8da2-db798cf6a1e4-config-data-generated\") pod \"openstack-galera-0\" (UID: \"144491fe-49b9-4a76-8da2-db798cf6a1e4\") " pod="nova-kuttl-default/openstack-galera-0" Jan 27 08:08:54 crc kubenswrapper[4787]: I0127 08:08:54.252978 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/144491fe-49b9-4a76-8da2-db798cf6a1e4-config-data-default\") pod \"openstack-galera-0\" (UID: \"144491fe-49b9-4a76-8da2-db798cf6a1e4\") " pod="nova-kuttl-default/openstack-galera-0" Jan 27 08:08:54 crc kubenswrapper[4787]: I0127 08:08:54.256870 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/144491fe-49b9-4a76-8da2-db798cf6a1e4-kolla-config\") pod \"openstack-galera-0\" (UID: \"144491fe-49b9-4a76-8da2-db798cf6a1e4\") " pod="nova-kuttl-default/openstack-galera-0" Jan 27 08:08:54 crc kubenswrapper[4787]: I0127 08:08:54.264043 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/144491fe-49b9-4a76-8da2-db798cf6a1e4-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"144491fe-49b9-4a76-8da2-db798cf6a1e4\") " pod="nova-kuttl-default/openstack-galera-0" Jan 27 08:08:54 crc kubenswrapper[4787]: I0127 08:08:54.267830 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/144491fe-49b9-4a76-8da2-db798cf6a1e4-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"144491fe-49b9-4a76-8da2-db798cf6a1e4\") " pod="nova-kuttl-default/openstack-galera-0" Jan 27 
08:08:54 crc kubenswrapper[4787]: I0127 08:08:54.268480 4787 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Jan 27 08:08:54 crc kubenswrapper[4787]: I0127 08:08:54.268523 4787 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-0c1e6e82-ab87-4992-97e4-a29fbbb75e54\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-0c1e6e82-ab87-4992-97e4-a29fbbb75e54\") pod \"openstack-galera-0\" (UID: \"144491fe-49b9-4a76-8da2-db798cf6a1e4\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/51a2db4ae30aef6d7294853f76f947ddfd9610e76fc9480e73634ab28e724158/globalmount\"" pod="nova-kuttl-default/openstack-galera-0" Jan 27 08:08:54 crc kubenswrapper[4787]: I0127 08:08:54.278206 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4bsbt\" (UniqueName: \"kubernetes.io/projected/144491fe-49b9-4a76-8da2-db798cf6a1e4-kube-api-access-4bsbt\") pod \"openstack-galera-0\" (UID: \"144491fe-49b9-4a76-8da2-db798cf6a1e4\") " pod="nova-kuttl-default/openstack-galera-0" Jan 27 08:08:54 crc kubenswrapper[4787]: I0127 08:08:54.337566 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-0c1e6e82-ab87-4992-97e4-a29fbbb75e54\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-0c1e6e82-ab87-4992-97e4-a29fbbb75e54\") pod \"openstack-galera-0\" (UID: \"144491fe-49b9-4a76-8da2-db798cf6a1e4\") " pod="nova-kuttl-default/openstack-galera-0" Jan 27 08:08:54 crc kubenswrapper[4787]: I0127 08:08:54.359312 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/openstack-galera-0" Jan 27 08:08:54 crc kubenswrapper[4787]: I0127 08:08:54.395804 4787 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["nova-kuttl-default/memcached-0"] Jan 27 08:08:54 crc kubenswrapper[4787]: I0127 08:08:54.398792 4787 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="nova-kuttl-default/memcached-0" Jan 27 08:08:54 crc kubenswrapper[4787]: I0127 08:08:54.403222 4787 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"memcached-memcached-dockercfg-wdr2q" Jan 27 08:08:54 crc kubenswrapper[4787]: I0127 08:08:54.403441 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"nova-kuttl-default"/"memcached-config-data" Jan 27 08:08:54 crc kubenswrapper[4787]: I0127 08:08:54.422021 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/memcached-0"] Jan 27 08:08:54 crc kubenswrapper[4787]: I0127 08:08:54.468802 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/rabbitmq-broadcaster-server-0" event={"ID":"68be2504-1de8-427c-9937-bd0428f7c5c4","Type":"ContainerStarted","Data":"9ea2cf6619889b10230a361f5a1a8f72e024b5a3185d7c9014ec65a08c5dea27"} Jan 27 08:08:54 crc kubenswrapper[4787]: I0127 08:08:54.476514 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/rabbitmq-server-0" event={"ID":"f84bfff4-b4f2-40d8-8b81-a9e5eb776442","Type":"ContainerStarted","Data":"7bdf39254feca0fe0d095f1c43851a7f4782f3195c8d7fab4715c3e0b9adfb11"} Jan 27 08:08:54 crc kubenswrapper[4787]: I0127 08:08:54.478726 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/rabbitmq-cell1-server-0" event={"ID":"faff0a15-880a-4cf7-a0e0-81d573ace274","Type":"ContainerStarted","Data":"00921be760b5ce1051039b98d15ddc8ffa501aa8a39821ad9e6ae171814f6477"} Jan 27 08:08:54 crc kubenswrapper[4787]: I0127 08:08:54.549311 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/b0fc932b-0d21-426c-9b58-15c14ad9e95b-config-data\") pod \"memcached-0\" (UID: \"b0fc932b-0d21-426c-9b58-15c14ad9e95b\") " pod="nova-kuttl-default/memcached-0" Jan 27 08:08:54 crc kubenswrapper[4787]: I0127 08:08:54.549892 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/b0fc932b-0d21-426c-9b58-15c14ad9e95b-kolla-config\") pod \"memcached-0\" (UID: \"b0fc932b-0d21-426c-9b58-15c14ad9e95b\") " pod="nova-kuttl-default/memcached-0" Jan 27 08:08:54 crc kubenswrapper[4787]: I0127 08:08:54.549995 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tztlk\" (UniqueName: \"kubernetes.io/projected/b0fc932b-0d21-426c-9b58-15c14ad9e95b-kube-api-access-tztlk\") pod \"memcached-0\" (UID: \"b0fc932b-0d21-426c-9b58-15c14ad9e95b\") " pod="nova-kuttl-default/memcached-0" Jan 27 08:08:54 crc kubenswrapper[4787]: I0127 08:08:54.641938 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/rabbitmq-notifications-server-0"] Jan 27 08:08:54 crc kubenswrapper[4787]: I0127 08:08:54.652023 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/b0fc932b-0d21-426c-9b58-15c14ad9e95b-config-data\") pod \"memcached-0\" (UID: \"b0fc932b-0d21-426c-9b58-15c14ad9e95b\") " pod="nova-kuttl-default/memcached-0" Jan 27 08:08:54 crc kubenswrapper[4787]: I0127 08:08:54.652174 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/b0fc932b-0d21-426c-9b58-15c14ad9e95b-kolla-config\") pod \"memcached-0\" (UID: \"b0fc932b-0d21-426c-9b58-15c14ad9e95b\") " 
pod="nova-kuttl-default/memcached-0" Jan 27 08:08:54 crc kubenswrapper[4787]: I0127 08:08:54.652203 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tztlk\" (UniqueName: \"kubernetes.io/projected/b0fc932b-0d21-426c-9b58-15c14ad9e95b-kube-api-access-tztlk\") pod \"memcached-0\" (UID: \"b0fc932b-0d21-426c-9b58-15c14ad9e95b\") " pod="nova-kuttl-default/memcached-0" Jan 27 08:08:54 crc kubenswrapper[4787]: I0127 08:08:54.653286 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/b0fc932b-0d21-426c-9b58-15c14ad9e95b-config-data\") pod \"memcached-0\" (UID: \"b0fc932b-0d21-426c-9b58-15c14ad9e95b\") " pod="nova-kuttl-default/memcached-0" Jan 27 08:08:54 crc kubenswrapper[4787]: I0127 08:08:54.653328 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/b0fc932b-0d21-426c-9b58-15c14ad9e95b-kolla-config\") pod \"memcached-0\" (UID: \"b0fc932b-0d21-426c-9b58-15c14ad9e95b\") " pod="nova-kuttl-default/memcached-0" Jan 27 08:08:54 crc kubenswrapper[4787]: W0127 08:08:54.663169 4787 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1d9a6f95_2510_4e74_b4f7_fb592d761c91.slice/crio-ed6bb6f4d3c10e5f249cf88895cdfa708b5f24c7419f2e18f6691df2ff20145c WatchSource:0}: Error finding container ed6bb6f4d3c10e5f249cf88895cdfa708b5f24c7419f2e18f6691df2ff20145c: Status 404 returned error can't find the container with id ed6bb6f4d3c10e5f249cf88895cdfa708b5f24c7419f2e18f6691df2ff20145c Jan 27 08:08:54 crc kubenswrapper[4787]: I0127 08:08:54.672222 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tztlk\" (UniqueName: \"kubernetes.io/projected/b0fc932b-0d21-426c-9b58-15c14ad9e95b-kube-api-access-tztlk\") pod \"memcached-0\" (UID: \"b0fc932b-0d21-426c-9b58-15c14ad9e95b\") " pod="nova-kuttl-default/memcached-0" Jan 27 08:08:54 crc kubenswrapper[4787]: I0127 08:08:54.749510 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/memcached-0" Jan 27 08:08:54 crc kubenswrapper[4787]: I0127 08:08:54.953297 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/openstack-galera-0"] Jan 27 08:08:54 crc kubenswrapper[4787]: W0127 08:08:54.985683 4787 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod144491fe_49b9_4a76_8da2_db798cf6a1e4.slice/crio-e3226f087d6e245096d06155c2977662dd54d793d0d964e86c97b52b51ef5388 WatchSource:0}: Error finding container e3226f087d6e245096d06155c2977662dd54d793d0d964e86c97b52b51ef5388: Status 404 returned error can't find the container with id e3226f087d6e245096d06155c2977662dd54d793d0d964e86c97b52b51ef5388 Jan 27 08:08:55 crc kubenswrapper[4787]: I0127 08:08:55.075868 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/memcached-0"] Jan 27 08:08:55 crc kubenswrapper[4787]: I0127 08:08:55.453224 4787 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["nova-kuttl-default/openstack-cell1-galera-0"] Jan 27 08:08:55 crc kubenswrapper[4787]: I0127 08:08:55.456015 4787 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="nova-kuttl-default/openstack-cell1-galera-0" Jan 27 08:08:55 crc kubenswrapper[4787]: I0127 08:08:55.460473 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"nova-kuttl-default"/"openstack-cell1-scripts" Jan 27 08:08:55 crc kubenswrapper[4787]: I0127 08:08:55.461800 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"nova-kuttl-default"/"openstack-cell1-config-data" Jan 27 08:08:55 crc kubenswrapper[4787]: I0127 08:08:55.462009 4787 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"cert-galera-openstack-cell1-svc" Jan 27 08:08:55 crc kubenswrapper[4787]: I0127 08:08:55.462184 4787 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"galera-openstack-cell1-dockercfg-dnprn" Jan 27 08:08:55 crc kubenswrapper[4787]: I0127 08:08:55.486163 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/openstack-cell1-galera-0"] Jan 27 08:08:55 crc kubenswrapper[4787]: I0127 08:08:55.493039 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/rabbitmq-notifications-server-0" event={"ID":"1d9a6f95-2510-4e74-b4f7-fb592d761c91","Type":"ContainerStarted","Data":"ed6bb6f4d3c10e5f249cf88895cdfa708b5f24c7419f2e18f6691df2ff20145c"} Jan 27 08:08:55 crc kubenswrapper[4787]: I0127 08:08:55.497463 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/memcached-0" event={"ID":"b0fc932b-0d21-426c-9b58-15c14ad9e95b","Type":"ContainerStarted","Data":"9d55989abeff56ebd57b0db618082786dc16023f5c5b9ee58731b4952ed38287"} Jan 27 08:08:55 crc kubenswrapper[4787]: I0127 08:08:55.499973 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/openstack-galera-0" event={"ID":"144491fe-49b9-4a76-8da2-db798cf6a1e4","Type":"ContainerStarted","Data":"e3226f087d6e245096d06155c2977662dd54d793d0d964e86c97b52b51ef5388"} Jan 27 08:08:55 crc kubenswrapper[4787]: I0127 08:08:55.583759 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4lmrn\" (UniqueName: \"kubernetes.io/projected/841b999e-6e42-4f28-8fd8-334f69c3e3e6-kube-api-access-4lmrn\") pod \"openstack-cell1-galera-0\" (UID: \"841b999e-6e42-4f28-8fd8-334f69c3e3e6\") " pod="nova-kuttl-default/openstack-cell1-galera-0" Jan 27 08:08:55 crc kubenswrapper[4787]: I0127 08:08:55.583812 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/841b999e-6e42-4f28-8fd8-334f69c3e3e6-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"841b999e-6e42-4f28-8fd8-334f69c3e3e6\") " pod="nova-kuttl-default/openstack-cell1-galera-0" Jan 27 08:08:55 crc kubenswrapper[4787]: I0127 08:08:55.583834 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/841b999e-6e42-4f28-8fd8-334f69c3e3e6-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"841b999e-6e42-4f28-8fd8-334f69c3e3e6\") " pod="nova-kuttl-default/openstack-cell1-galera-0" Jan 27 08:08:55 crc kubenswrapper[4787]: I0127 08:08:55.584106 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/841b999e-6e42-4f28-8fd8-334f69c3e3e6-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"841b999e-6e42-4f28-8fd8-334f69c3e3e6\") " 
pod="nova-kuttl-default/openstack-cell1-galera-0" Jan 27 08:08:55 crc kubenswrapper[4787]: I0127 08:08:55.584138 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-14d23fb8-f9f3-4b2c-8c73-3e8cc19dad87\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-14d23fb8-f9f3-4b2c-8c73-3e8cc19dad87\") pod \"openstack-cell1-galera-0\" (UID: \"841b999e-6e42-4f28-8fd8-334f69c3e3e6\") " pod="nova-kuttl-default/openstack-cell1-galera-0" Jan 27 08:08:55 crc kubenswrapper[4787]: I0127 08:08:55.584154 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/841b999e-6e42-4f28-8fd8-334f69c3e3e6-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"841b999e-6e42-4f28-8fd8-334f69c3e3e6\") " pod="nova-kuttl-default/openstack-cell1-galera-0" Jan 27 08:08:55 crc kubenswrapper[4787]: I0127 08:08:55.584177 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/841b999e-6e42-4f28-8fd8-334f69c3e3e6-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"841b999e-6e42-4f28-8fd8-334f69c3e3e6\") " pod="nova-kuttl-default/openstack-cell1-galera-0" Jan 27 08:08:55 crc kubenswrapper[4787]: I0127 08:08:55.584218 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/841b999e-6e42-4f28-8fd8-334f69c3e3e6-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"841b999e-6e42-4f28-8fd8-334f69c3e3e6\") " pod="nova-kuttl-default/openstack-cell1-galera-0" Jan 27 08:08:55 crc kubenswrapper[4787]: I0127 08:08:55.685356 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4lmrn\" (UniqueName: \"kubernetes.io/projected/841b999e-6e42-4f28-8fd8-334f69c3e3e6-kube-api-access-4lmrn\") pod \"openstack-cell1-galera-0\" (UID: \"841b999e-6e42-4f28-8fd8-334f69c3e3e6\") " pod="nova-kuttl-default/openstack-cell1-galera-0" Jan 27 08:08:55 crc kubenswrapper[4787]: I0127 08:08:55.685809 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/841b999e-6e42-4f28-8fd8-334f69c3e3e6-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"841b999e-6e42-4f28-8fd8-334f69c3e3e6\") " pod="nova-kuttl-default/openstack-cell1-galera-0" Jan 27 08:08:55 crc kubenswrapper[4787]: I0127 08:08:55.685844 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/841b999e-6e42-4f28-8fd8-334f69c3e3e6-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"841b999e-6e42-4f28-8fd8-334f69c3e3e6\") " pod="nova-kuttl-default/openstack-cell1-galera-0" Jan 27 08:08:55 crc kubenswrapper[4787]: I0127 08:08:55.685918 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/841b999e-6e42-4f28-8fd8-334f69c3e3e6-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"841b999e-6e42-4f28-8fd8-334f69c3e3e6\") " pod="nova-kuttl-default/openstack-cell1-galera-0" Jan 27 08:08:55 crc kubenswrapper[4787]: I0127 08:08:55.685982 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-14d23fb8-f9f3-4b2c-8c73-3e8cc19dad87\" (UniqueName: 
\"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-14d23fb8-f9f3-4b2c-8c73-3e8cc19dad87\") pod \"openstack-cell1-galera-0\" (UID: \"841b999e-6e42-4f28-8fd8-334f69c3e3e6\") " pod="nova-kuttl-default/openstack-cell1-galera-0" Jan 27 08:08:55 crc kubenswrapper[4787]: I0127 08:08:55.686013 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/841b999e-6e42-4f28-8fd8-334f69c3e3e6-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"841b999e-6e42-4f28-8fd8-334f69c3e3e6\") " pod="nova-kuttl-default/openstack-cell1-galera-0" Jan 27 08:08:55 crc kubenswrapper[4787]: I0127 08:08:55.686053 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/841b999e-6e42-4f28-8fd8-334f69c3e3e6-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"841b999e-6e42-4f28-8fd8-334f69c3e3e6\") " pod="nova-kuttl-default/openstack-cell1-galera-0" Jan 27 08:08:55 crc kubenswrapper[4787]: I0127 08:08:55.686108 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/841b999e-6e42-4f28-8fd8-334f69c3e3e6-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"841b999e-6e42-4f28-8fd8-334f69c3e3e6\") " pod="nova-kuttl-default/openstack-cell1-galera-0" Jan 27 08:08:55 crc kubenswrapper[4787]: I0127 08:08:55.691465 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/841b999e-6e42-4f28-8fd8-334f69c3e3e6-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"841b999e-6e42-4f28-8fd8-334f69c3e3e6\") " pod="nova-kuttl-default/openstack-cell1-galera-0" Jan 27 08:08:55 crc kubenswrapper[4787]: I0127 08:08:55.692006 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/841b999e-6e42-4f28-8fd8-334f69c3e3e6-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"841b999e-6e42-4f28-8fd8-334f69c3e3e6\") " pod="nova-kuttl-default/openstack-cell1-galera-0" Jan 27 08:08:55 crc kubenswrapper[4787]: I0127 08:08:55.692518 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/841b999e-6e42-4f28-8fd8-334f69c3e3e6-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"841b999e-6e42-4f28-8fd8-334f69c3e3e6\") " pod="nova-kuttl-default/openstack-cell1-galera-0" Jan 27 08:08:55 crc kubenswrapper[4787]: I0127 08:08:55.693805 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/841b999e-6e42-4f28-8fd8-334f69c3e3e6-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"841b999e-6e42-4f28-8fd8-334f69c3e3e6\") " pod="nova-kuttl-default/openstack-cell1-galera-0" Jan 27 08:08:55 crc kubenswrapper[4787]: I0127 08:08:55.694673 4787 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Jan 27 08:08:55 crc kubenswrapper[4787]: I0127 08:08:55.694702 4787 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-14d23fb8-f9f3-4b2c-8c73-3e8cc19dad87\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-14d23fb8-f9f3-4b2c-8c73-3e8cc19dad87\") pod \"openstack-cell1-galera-0\" (UID: \"841b999e-6e42-4f28-8fd8-334f69c3e3e6\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/124149b2461db4c62bd3ac0268dc92a81b0f2a8d060cca5c653c25533aaa0b7c/globalmount\"" pod="nova-kuttl-default/openstack-cell1-galera-0" Jan 27 08:08:55 crc kubenswrapper[4787]: I0127 08:08:55.701601 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/841b999e-6e42-4f28-8fd8-334f69c3e3e6-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"841b999e-6e42-4f28-8fd8-334f69c3e3e6\") " pod="nova-kuttl-default/openstack-cell1-galera-0" Jan 27 08:08:55 crc kubenswrapper[4787]: I0127 08:08:55.702420 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/841b999e-6e42-4f28-8fd8-334f69c3e3e6-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"841b999e-6e42-4f28-8fd8-334f69c3e3e6\") " pod="nova-kuttl-default/openstack-cell1-galera-0" Jan 27 08:08:55 crc kubenswrapper[4787]: I0127 08:08:55.706436 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4lmrn\" (UniqueName: \"kubernetes.io/projected/841b999e-6e42-4f28-8fd8-334f69c3e3e6-kube-api-access-4lmrn\") pod \"openstack-cell1-galera-0\" (UID: \"841b999e-6e42-4f28-8fd8-334f69c3e3e6\") " pod="nova-kuttl-default/openstack-cell1-galera-0" Jan 27 08:08:55 crc kubenswrapper[4787]: I0127 08:08:55.738734 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-14d23fb8-f9f3-4b2c-8c73-3e8cc19dad87\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-14d23fb8-f9f3-4b2c-8c73-3e8cc19dad87\") pod \"openstack-cell1-galera-0\" (UID: \"841b999e-6e42-4f28-8fd8-334f69c3e3e6\") " pod="nova-kuttl-default/openstack-cell1-galera-0" Jan 27 08:08:55 crc kubenswrapper[4787]: I0127 08:08:55.792873 4787 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="nova-kuttl-default/openstack-cell1-galera-0" Jan 27 08:08:56 crc kubenswrapper[4787]: I0127 08:08:56.453878 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/openstack-cell1-galera-0"] Jan 27 08:08:56 crc kubenswrapper[4787]: I0127 08:08:56.526228 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/openstack-cell1-galera-0" event={"ID":"841b999e-6e42-4f28-8fd8-334f69c3e3e6","Type":"ContainerStarted","Data":"c5a33ff6618a7b8dfde48f26982ff15af5a6d3de16ffa969b25adc99494ed3a8"} Jan 27 08:09:08 crc kubenswrapper[4787]: I0127 08:09:08.843041 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/memcached-0" event={"ID":"b0fc932b-0d21-426c-9b58-15c14ad9e95b","Type":"ContainerStarted","Data":"67e3f1370a7cab2f2c0368ba565165396abfb9d407f372525152832c659a03ab"} Jan 27 08:09:08 crc kubenswrapper[4787]: I0127 08:09:08.843978 4787 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="nova-kuttl-default/memcached-0" Jan 27 08:09:08 crc kubenswrapper[4787]: I0127 08:09:08.846617 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/openstack-galera-0" event={"ID":"144491fe-49b9-4a76-8da2-db798cf6a1e4","Type":"ContainerStarted","Data":"4b8b94542d9b3f0005adea1a98c96aea842556c492a4a9bd9428e58b7ae1776c"} Jan 27 08:09:08 crc kubenswrapper[4787]: I0127 08:09:08.848286 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/openstack-cell1-galera-0" event={"ID":"841b999e-6e42-4f28-8fd8-334f69c3e3e6","Type":"ContainerStarted","Data":"f5d5c42142443823eacee17797aa68ba88ae203cfa5b52b13ad3b91e87f9abf6"} Jan 27 08:09:08 crc kubenswrapper[4787]: I0127 08:09:08.869726 4787 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="nova-kuttl-default/memcached-0" podStartSLOduration=2.186614048 podStartE2EDuration="14.869696828s" podCreationTimestamp="2026-01-27 08:08:54 +0000 UTC" firstStartedPulling="2026-01-27 08:08:55.143931268 +0000 UTC m=+1040.796286770" lastFinishedPulling="2026-01-27 08:09:07.827014038 +0000 UTC m=+1053.479369550" observedRunningTime="2026-01-27 08:09:08.869043014 +0000 UTC m=+1054.521398516" watchObservedRunningTime="2026-01-27 08:09:08.869696828 +0000 UTC m=+1054.522052340" Jan 27 08:09:10 crc kubenswrapper[4787]: I0127 08:09:10.863449 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/rabbitmq-cell1-server-0" event={"ID":"faff0a15-880a-4cf7-a0e0-81d573ace274","Type":"ContainerStarted","Data":"e5125bf2a44027a200b519f739fdd0eef6bd684deedf878b3831835beda70a71"} Jan 27 08:09:10 crc kubenswrapper[4787]: I0127 08:09:10.865507 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/rabbitmq-notifications-server-0" event={"ID":"1d9a6f95-2510-4e74-b4f7-fb592d761c91","Type":"ContainerStarted","Data":"013b2f12459c08307712f9e501100fc81d14973b010bcdc9c24006a8c2afc6b8"} Jan 27 08:09:10 crc kubenswrapper[4787]: I0127 08:09:10.868850 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/rabbitmq-broadcaster-server-0" event={"ID":"68be2504-1de8-427c-9937-bd0428f7c5c4","Type":"ContainerStarted","Data":"60be7c927e866796541fb6c53b2b04db37adbfe1b02bf03938d89ea5c5370982"} Jan 27 08:09:10 crc kubenswrapper[4787]: I0127 08:09:10.871684 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/rabbitmq-server-0" 
event={"ID":"f84bfff4-b4f2-40d8-8b81-a9e5eb776442","Type":"ContainerStarted","Data":"b35fd8bea3d2d9361614b10dbc01f3a16983ab9d5a38a13fd4df6390c81bb921"} Jan 27 08:09:13 crc kubenswrapper[4787]: I0127 08:09:13.906484 4787 generic.go:334] "Generic (PLEG): container finished" podID="144491fe-49b9-4a76-8da2-db798cf6a1e4" containerID="4b8b94542d9b3f0005adea1a98c96aea842556c492a4a9bd9428e58b7ae1776c" exitCode=0 Jan 27 08:09:13 crc kubenswrapper[4787]: I0127 08:09:13.906595 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/openstack-galera-0" event={"ID":"144491fe-49b9-4a76-8da2-db798cf6a1e4","Type":"ContainerDied","Data":"4b8b94542d9b3f0005adea1a98c96aea842556c492a4a9bd9428e58b7ae1776c"} Jan 27 08:09:13 crc kubenswrapper[4787]: I0127 08:09:13.912318 4787 generic.go:334] "Generic (PLEG): container finished" podID="841b999e-6e42-4f28-8fd8-334f69c3e3e6" containerID="f5d5c42142443823eacee17797aa68ba88ae203cfa5b52b13ad3b91e87f9abf6" exitCode=0 Jan 27 08:09:13 crc kubenswrapper[4787]: I0127 08:09:13.912982 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/openstack-cell1-galera-0" event={"ID":"841b999e-6e42-4f28-8fd8-334f69c3e3e6","Type":"ContainerDied","Data":"f5d5c42142443823eacee17797aa68ba88ae203cfa5b52b13ad3b91e87f9abf6"} Jan 27 08:09:14 crc kubenswrapper[4787]: I0127 08:09:14.751368 4787 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="nova-kuttl-default/memcached-0" Jan 27 08:09:14 crc kubenswrapper[4787]: I0127 08:09:14.923590 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/openstack-cell1-galera-0" event={"ID":"841b999e-6e42-4f28-8fd8-334f69c3e3e6","Type":"ContainerStarted","Data":"dd6a00442008c713c575f1ab05ebb090e7f5790a7479efa76fc3ab8a2d2f2a13"} Jan 27 08:09:14 crc kubenswrapper[4787]: I0127 08:09:14.926349 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/openstack-galera-0" event={"ID":"144491fe-49b9-4a76-8da2-db798cf6a1e4","Type":"ContainerStarted","Data":"b341bda581055b5a3691fb44350e38e84e31d5b1ca393c095829a7e67cdf9f4c"} Jan 27 08:09:14 crc kubenswrapper[4787]: I0127 08:09:14.951167 4787 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="nova-kuttl-default/openstack-cell1-galera-0" podStartSLOduration=9.581992863 podStartE2EDuration="20.951146096s" podCreationTimestamp="2026-01-27 08:08:54 +0000 UTC" firstStartedPulling="2026-01-27 08:08:56.506729384 +0000 UTC m=+1042.159084876" lastFinishedPulling="2026-01-27 08:09:07.875882617 +0000 UTC m=+1053.528238109" observedRunningTime="2026-01-27 08:09:14.943140261 +0000 UTC m=+1060.595495773" watchObservedRunningTime="2026-01-27 08:09:14.951146096 +0000 UTC m=+1060.603501588" Jan 27 08:09:14 crc kubenswrapper[4787]: I0127 08:09:14.980105 4787 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="nova-kuttl-default/openstack-galera-0" podStartSLOduration=10.045044712 podStartE2EDuration="22.980079595s" podCreationTimestamp="2026-01-27 08:08:52 +0000 UTC" firstStartedPulling="2026-01-27 08:08:54.990673518 +0000 UTC m=+1040.643029010" lastFinishedPulling="2026-01-27 08:09:07.925708401 +0000 UTC m=+1053.578063893" observedRunningTime="2026-01-27 08:09:14.976734648 +0000 UTC m=+1060.629090150" watchObservedRunningTime="2026-01-27 08:09:14.980079595 +0000 UTC m=+1060.632435087" Jan 27 08:09:15 crc kubenswrapper[4787]: I0127 08:09:15.794376 4787 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="nova-kuttl-default/openstack-cell1-galera-0" Jan 27 
08:09:15 crc kubenswrapper[4787]: I0127 08:09:15.794424 4787 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="nova-kuttl-default/openstack-cell1-galera-0" Jan 27 08:09:18 crc kubenswrapper[4787]: E0127 08:09:18.998011 4787 upgradeaware.go:427] Error proxying data from client to backend: readfrom tcp 38.102.83.181:51570->38.102.83.181:36351: write tcp 38.102.83.181:51570->38.102.83.181:36351: write: broken pipe Jan 27 08:09:19 crc kubenswrapper[4787]: I0127 08:09:19.900585 4787 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="nova-kuttl-default/openstack-cell1-galera-0" Jan 27 08:09:19 crc kubenswrapper[4787]: I0127 08:09:19.990901 4787 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="nova-kuttl-default/openstack-cell1-galera-0" Jan 27 08:09:22 crc kubenswrapper[4787]: I0127 08:09:22.822385 4787 patch_prober.go:28] interesting pod/machine-config-daemon-q4fh5 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 27 08:09:22 crc kubenswrapper[4787]: I0127 08:09:22.822842 4787 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-q4fh5" podUID="f051e184-acac-47cf-9e04-9df648288715" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 27 08:09:22 crc kubenswrapper[4787]: I0127 08:09:22.822899 4787 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-q4fh5" Jan 27 08:09:22 crc kubenswrapper[4787]: I0127 08:09:22.823659 4787 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"8407f8fd138ff9e300a7e8488e02cc30a66d50ed89a7c4ef1d3339c94e2a22b5"} pod="openshift-machine-config-operator/machine-config-daemon-q4fh5" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 27 08:09:22 crc kubenswrapper[4787]: I0127 08:09:22.823735 4787 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-q4fh5" podUID="f051e184-acac-47cf-9e04-9df648288715" containerName="machine-config-daemon" containerID="cri-o://8407f8fd138ff9e300a7e8488e02cc30a66d50ed89a7c4ef1d3339c94e2a22b5" gracePeriod=600 Jan 27 08:09:23 crc kubenswrapper[4787]: I0127 08:09:23.000482 4787 generic.go:334] "Generic (PLEG): container finished" podID="f051e184-acac-47cf-9e04-9df648288715" containerID="8407f8fd138ff9e300a7e8488e02cc30a66d50ed89a7c4ef1d3339c94e2a22b5" exitCode=0 Jan 27 08:09:23 crc kubenswrapper[4787]: I0127 08:09:23.000564 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-q4fh5" event={"ID":"f051e184-acac-47cf-9e04-9df648288715","Type":"ContainerDied","Data":"8407f8fd138ff9e300a7e8488e02cc30a66d50ed89a7c4ef1d3339c94e2a22b5"} Jan 27 08:09:23 crc kubenswrapper[4787]: I0127 08:09:23.000619 4787 scope.go:117] "RemoveContainer" containerID="76c11a5da51cedd24f4d664015e61242e532ce42823eb519b69423acc27d454d" Jan 27 08:09:24 crc kubenswrapper[4787]: I0127 08:09:24.013321 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-q4fh5" 
event={"ID":"f051e184-acac-47cf-9e04-9df648288715","Type":"ContainerStarted","Data":"62fa4f58004172098a708acf93c4ba2c2d75c5b2ad098327437e26bfa28ab448"} Jan 27 08:09:24 crc kubenswrapper[4787]: I0127 08:09:24.361314 4787 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="nova-kuttl-default/openstack-galera-0" Jan 27 08:09:24 crc kubenswrapper[4787]: I0127 08:09:24.362513 4787 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="nova-kuttl-default/openstack-galera-0" Jan 27 08:09:24 crc kubenswrapper[4787]: I0127 08:09:24.449799 4787 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="nova-kuttl-default/openstack-galera-0" Jan 27 08:09:24 crc kubenswrapper[4787]: I0127 08:09:24.524487 4787 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["nova-kuttl-default/root-account-create-update-c6t44"] Jan 27 08:09:24 crc kubenswrapper[4787]: I0127 08:09:24.525784 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/root-account-create-update-c6t44" Jan 27 08:09:24 crc kubenswrapper[4787]: I0127 08:09:24.528972 4787 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"openstack-cell1-mariadb-root-db-secret" Jan 27 08:09:24 crc kubenswrapper[4787]: I0127 08:09:24.539115 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/root-account-create-update-c6t44"] Jan 27 08:09:24 crc kubenswrapper[4787]: I0127 08:09:24.630679 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8d295e1a-f5ae-49e1-aa8c-75a73af430a4-operator-scripts\") pod \"root-account-create-update-c6t44\" (UID: \"8d295e1a-f5ae-49e1-aa8c-75a73af430a4\") " pod="nova-kuttl-default/root-account-create-update-c6t44" Jan 27 08:09:24 crc kubenswrapper[4787]: I0127 08:09:24.630958 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hllsc\" (UniqueName: \"kubernetes.io/projected/8d295e1a-f5ae-49e1-aa8c-75a73af430a4-kube-api-access-hllsc\") pod \"root-account-create-update-c6t44\" (UID: \"8d295e1a-f5ae-49e1-aa8c-75a73af430a4\") " pod="nova-kuttl-default/root-account-create-update-c6t44" Jan 27 08:09:24 crc kubenswrapper[4787]: I0127 08:09:24.732613 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8d295e1a-f5ae-49e1-aa8c-75a73af430a4-operator-scripts\") pod \"root-account-create-update-c6t44\" (UID: \"8d295e1a-f5ae-49e1-aa8c-75a73af430a4\") " pod="nova-kuttl-default/root-account-create-update-c6t44" Jan 27 08:09:24 crc kubenswrapper[4787]: I0127 08:09:24.732890 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hllsc\" (UniqueName: \"kubernetes.io/projected/8d295e1a-f5ae-49e1-aa8c-75a73af430a4-kube-api-access-hllsc\") pod \"root-account-create-update-c6t44\" (UID: \"8d295e1a-f5ae-49e1-aa8c-75a73af430a4\") " pod="nova-kuttl-default/root-account-create-update-c6t44" Jan 27 08:09:24 crc kubenswrapper[4787]: I0127 08:09:24.735372 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8d295e1a-f5ae-49e1-aa8c-75a73af430a4-operator-scripts\") pod \"root-account-create-update-c6t44\" (UID: \"8d295e1a-f5ae-49e1-aa8c-75a73af430a4\") " pod="nova-kuttl-default/root-account-create-update-c6t44" Jan 27 08:09:24 crc 
kubenswrapper[4787]: I0127 08:09:24.757215 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hllsc\" (UniqueName: \"kubernetes.io/projected/8d295e1a-f5ae-49e1-aa8c-75a73af430a4-kube-api-access-hllsc\") pod \"root-account-create-update-c6t44\" (UID: \"8d295e1a-f5ae-49e1-aa8c-75a73af430a4\") " pod="nova-kuttl-default/root-account-create-update-c6t44" Jan 27 08:09:24 crc kubenswrapper[4787]: I0127 08:09:24.847683 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/root-account-create-update-c6t44" Jan 27 08:09:25 crc kubenswrapper[4787]: I0127 08:09:25.106561 4787 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="nova-kuttl-default/openstack-galera-0" Jan 27 08:09:25 crc kubenswrapper[4787]: I0127 08:09:25.293927 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/root-account-create-update-c6t44"] Jan 27 08:09:26 crc kubenswrapper[4787]: I0127 08:09:26.038942 4787 generic.go:334] "Generic (PLEG): container finished" podID="8d295e1a-f5ae-49e1-aa8c-75a73af430a4" containerID="d304505100a9e40f977481818afbc2239ed1910ed8a9349b8d4d47740a9431bb" exitCode=0 Jan 27 08:09:26 crc kubenswrapper[4787]: I0127 08:09:26.039041 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/root-account-create-update-c6t44" event={"ID":"8d295e1a-f5ae-49e1-aa8c-75a73af430a4","Type":"ContainerDied","Data":"d304505100a9e40f977481818afbc2239ed1910ed8a9349b8d4d47740a9431bb"} Jan 27 08:09:26 crc kubenswrapper[4787]: I0127 08:09:26.039422 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/root-account-create-update-c6t44" event={"ID":"8d295e1a-f5ae-49e1-aa8c-75a73af430a4","Type":"ContainerStarted","Data":"55910de522cd1c628eb820391f2a22eac660770fb65284eb1346248d992f8f13"} Jan 27 08:09:27 crc kubenswrapper[4787]: I0127 08:09:27.430009 4787 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/root-account-create-update-c6t44" Jan 27 08:09:27 crc kubenswrapper[4787]: I0127 08:09:27.590122 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8d295e1a-f5ae-49e1-aa8c-75a73af430a4-operator-scripts\") pod \"8d295e1a-f5ae-49e1-aa8c-75a73af430a4\" (UID: \"8d295e1a-f5ae-49e1-aa8c-75a73af430a4\") " Jan 27 08:09:27 crc kubenswrapper[4787]: I0127 08:09:27.590238 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hllsc\" (UniqueName: \"kubernetes.io/projected/8d295e1a-f5ae-49e1-aa8c-75a73af430a4-kube-api-access-hllsc\") pod \"8d295e1a-f5ae-49e1-aa8c-75a73af430a4\" (UID: \"8d295e1a-f5ae-49e1-aa8c-75a73af430a4\") " Jan 27 08:09:27 crc kubenswrapper[4787]: I0127 08:09:27.591635 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8d295e1a-f5ae-49e1-aa8c-75a73af430a4-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "8d295e1a-f5ae-49e1-aa8c-75a73af430a4" (UID: "8d295e1a-f5ae-49e1-aa8c-75a73af430a4"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 27 08:09:27 crc kubenswrapper[4787]: I0127 08:09:27.599908 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8d295e1a-f5ae-49e1-aa8c-75a73af430a4-kube-api-access-hllsc" (OuterVolumeSpecName: "kube-api-access-hllsc") pod "8d295e1a-f5ae-49e1-aa8c-75a73af430a4" (UID: "8d295e1a-f5ae-49e1-aa8c-75a73af430a4"). InnerVolumeSpecName "kube-api-access-hllsc". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 27 08:09:27 crc kubenswrapper[4787]: I0127 08:09:27.693273 4787 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8d295e1a-f5ae-49e1-aa8c-75a73af430a4-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 27 08:09:27 crc kubenswrapper[4787]: I0127 08:09:27.693334 4787 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hllsc\" (UniqueName: \"kubernetes.io/projected/8d295e1a-f5ae-49e1-aa8c-75a73af430a4-kube-api-access-hllsc\") on node \"crc\" DevicePath \"\"" Jan 27 08:09:28 crc kubenswrapper[4787]: I0127 08:09:28.056295 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/root-account-create-update-c6t44" event={"ID":"8d295e1a-f5ae-49e1-aa8c-75a73af430a4","Type":"ContainerDied","Data":"55910de522cd1c628eb820391f2a22eac660770fb65284eb1346248d992f8f13"} Jan 27 08:09:28 crc kubenswrapper[4787]: I0127 08:09:28.056336 4787 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/root-account-create-update-c6t44" Jan 27 08:09:28 crc kubenswrapper[4787]: I0127 08:09:28.056350 4787 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="55910de522cd1c628eb820391f2a22eac660770fb65284eb1346248d992f8f13" Jan 27 08:09:32 crc kubenswrapper[4787]: I0127 08:09:32.946164 4787 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["nova-kuttl-default/root-account-create-update-c6t44"] Jan 27 08:09:32 crc kubenswrapper[4787]: I0127 08:09:32.953193 4787 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["nova-kuttl-default/root-account-create-update-c6t44"] Jan 27 08:09:33 crc kubenswrapper[4787]: I0127 08:09:33.037859 4787 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["nova-kuttl-default/root-account-create-update-b4lsd"] Jan 27 08:09:33 crc kubenswrapper[4787]: E0127 08:09:33.038256 4787 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8d295e1a-f5ae-49e1-aa8c-75a73af430a4" containerName="mariadb-account-create-update" Jan 27 08:09:33 crc kubenswrapper[4787]: I0127 08:09:33.038281 4787 state_mem.go:107] "Deleted CPUSet assignment" podUID="8d295e1a-f5ae-49e1-aa8c-75a73af430a4" containerName="mariadb-account-create-update" Jan 27 08:09:33 crc kubenswrapper[4787]: I0127 08:09:33.038494 4787 memory_manager.go:354] "RemoveStaleState removing state" podUID="8d295e1a-f5ae-49e1-aa8c-75a73af430a4" containerName="mariadb-account-create-update" Jan 27 08:09:33 crc kubenswrapper[4787]: I0127 08:09:33.039173 4787 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="nova-kuttl-default/root-account-create-update-b4lsd" Jan 27 08:09:33 crc kubenswrapper[4787]: I0127 08:09:33.041358 4787 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"openstack-mariadb-root-db-secret" Jan 27 08:09:33 crc kubenswrapper[4787]: I0127 08:09:33.049685 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/root-account-create-update-b4lsd"] Jan 27 08:09:33 crc kubenswrapper[4787]: I0127 08:09:33.092660 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c3150913-c395-4ed0-a5d3-616426f58671-operator-scripts\") pod \"root-account-create-update-b4lsd\" (UID: \"c3150913-c395-4ed0-a5d3-616426f58671\") " pod="nova-kuttl-default/root-account-create-update-b4lsd" Jan 27 08:09:33 crc kubenswrapper[4787]: I0127 08:09:33.092864 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f2cg2\" (UniqueName: \"kubernetes.io/projected/c3150913-c395-4ed0-a5d3-616426f58671-kube-api-access-f2cg2\") pod \"root-account-create-update-b4lsd\" (UID: \"c3150913-c395-4ed0-a5d3-616426f58671\") " pod="nova-kuttl-default/root-account-create-update-b4lsd" Jan 27 08:09:33 crc kubenswrapper[4787]: I0127 08:09:33.100041 4787 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8d295e1a-f5ae-49e1-aa8c-75a73af430a4" path="/var/lib/kubelet/pods/8d295e1a-f5ae-49e1-aa8c-75a73af430a4/volumes" Jan 27 08:09:33 crc kubenswrapper[4787]: I0127 08:09:33.194930 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c3150913-c395-4ed0-a5d3-616426f58671-operator-scripts\") pod \"root-account-create-update-b4lsd\" (UID: \"c3150913-c395-4ed0-a5d3-616426f58671\") " pod="nova-kuttl-default/root-account-create-update-b4lsd" Jan 27 08:09:33 crc kubenswrapper[4787]: I0127 08:09:33.195004 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f2cg2\" (UniqueName: \"kubernetes.io/projected/c3150913-c395-4ed0-a5d3-616426f58671-kube-api-access-f2cg2\") pod \"root-account-create-update-b4lsd\" (UID: \"c3150913-c395-4ed0-a5d3-616426f58671\") " pod="nova-kuttl-default/root-account-create-update-b4lsd" Jan 27 08:09:33 crc kubenswrapper[4787]: I0127 08:09:33.196189 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c3150913-c395-4ed0-a5d3-616426f58671-operator-scripts\") pod \"root-account-create-update-b4lsd\" (UID: \"c3150913-c395-4ed0-a5d3-616426f58671\") " pod="nova-kuttl-default/root-account-create-update-b4lsd" Jan 27 08:09:33 crc kubenswrapper[4787]: I0127 08:09:33.221651 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f2cg2\" (UniqueName: \"kubernetes.io/projected/c3150913-c395-4ed0-a5d3-616426f58671-kube-api-access-f2cg2\") pod \"root-account-create-update-b4lsd\" (UID: \"c3150913-c395-4ed0-a5d3-616426f58671\") " pod="nova-kuttl-default/root-account-create-update-b4lsd" Jan 27 08:09:33 crc kubenswrapper[4787]: I0127 08:09:33.365836 4787 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="nova-kuttl-default/root-account-create-update-b4lsd" Jan 27 08:09:33 crc kubenswrapper[4787]: I0127 08:09:33.839229 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/root-account-create-update-b4lsd"] Jan 27 08:09:33 crc kubenswrapper[4787]: W0127 08:09:33.845366 4787 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc3150913_c395_4ed0_a5d3_616426f58671.slice/crio-a7da237db8236aafbc9b1a0633c16718a4e8ac2260b89cb17571476da9604d22 WatchSource:0}: Error finding container a7da237db8236aafbc9b1a0633c16718a4e8ac2260b89cb17571476da9604d22: Status 404 returned error can't find the container with id a7da237db8236aafbc9b1a0633c16718a4e8ac2260b89cb17571476da9604d22 Jan 27 08:09:34 crc kubenswrapper[4787]: I0127 08:09:34.116460 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/root-account-create-update-b4lsd" event={"ID":"c3150913-c395-4ed0-a5d3-616426f58671","Type":"ContainerStarted","Data":"b5b33f88492ce6991e1e36a7024a76b4049def405dfe4dc15a1159b39d6f6633"} Jan 27 08:09:34 crc kubenswrapper[4787]: I0127 08:09:34.116866 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/root-account-create-update-b4lsd" event={"ID":"c3150913-c395-4ed0-a5d3-616426f58671","Type":"ContainerStarted","Data":"a7da237db8236aafbc9b1a0633c16718a4e8ac2260b89cb17571476da9604d22"} Jan 27 08:09:34 crc kubenswrapper[4787]: I0127 08:09:34.137061 4787 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="nova-kuttl-default/root-account-create-update-b4lsd" podStartSLOduration=1.137032 podStartE2EDuration="1.137032s" podCreationTimestamp="2026-01-27 08:09:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-27 08:09:34.133194951 +0000 UTC m=+1079.785550453" watchObservedRunningTime="2026-01-27 08:09:34.137032 +0000 UTC m=+1079.789387492" Jan 27 08:09:34 crc kubenswrapper[4787]: I0127 08:09:34.226311 4787 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["nova-kuttl-default/keystone-db-create-wjmqh"] Jan 27 08:09:34 crc kubenswrapper[4787]: I0127 08:09:34.227460 4787 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="nova-kuttl-default/keystone-db-create-wjmqh" Jan 27 08:09:34 crc kubenswrapper[4787]: I0127 08:09:34.237971 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/keystone-db-create-wjmqh"] Jan 27 08:09:34 crc kubenswrapper[4787]: I0127 08:09:34.317934 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/205e2c3c-82ef-4aaa-b1b3-0465a855bde9-operator-scripts\") pod \"keystone-db-create-wjmqh\" (UID: \"205e2c3c-82ef-4aaa-b1b3-0465a855bde9\") " pod="nova-kuttl-default/keystone-db-create-wjmqh" Jan 27 08:09:34 crc kubenswrapper[4787]: I0127 08:09:34.318010 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8m6pj\" (UniqueName: \"kubernetes.io/projected/205e2c3c-82ef-4aaa-b1b3-0465a855bde9-kube-api-access-8m6pj\") pod \"keystone-db-create-wjmqh\" (UID: \"205e2c3c-82ef-4aaa-b1b3-0465a855bde9\") " pod="nova-kuttl-default/keystone-db-create-wjmqh" Jan 27 08:09:34 crc kubenswrapper[4787]: I0127 08:09:34.343183 4787 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["nova-kuttl-default/keystone-5828-account-create-update-xh7r6"] Jan 27 08:09:34 crc kubenswrapper[4787]: I0127 08:09:34.344470 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/keystone-5828-account-create-update-xh7r6" Jan 27 08:09:34 crc kubenswrapper[4787]: I0127 08:09:34.347084 4787 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"keystone-db-secret" Jan 27 08:09:34 crc kubenswrapper[4787]: I0127 08:09:34.359532 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/keystone-5828-account-create-update-xh7r6"] Jan 27 08:09:34 crc kubenswrapper[4787]: I0127 08:09:34.420587 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6mhst\" (UniqueName: \"kubernetes.io/projected/646de171-9e06-4df6-9e1e-07319c04b95c-kube-api-access-6mhst\") pod \"keystone-5828-account-create-update-xh7r6\" (UID: \"646de171-9e06-4df6-9e1e-07319c04b95c\") " pod="nova-kuttl-default/keystone-5828-account-create-update-xh7r6" Jan 27 08:09:34 crc kubenswrapper[4787]: I0127 08:09:34.420664 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/205e2c3c-82ef-4aaa-b1b3-0465a855bde9-operator-scripts\") pod \"keystone-db-create-wjmqh\" (UID: \"205e2c3c-82ef-4aaa-b1b3-0465a855bde9\") " pod="nova-kuttl-default/keystone-db-create-wjmqh" Jan 27 08:09:34 crc kubenswrapper[4787]: I0127 08:09:34.420695 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/646de171-9e06-4df6-9e1e-07319c04b95c-operator-scripts\") pod \"keystone-5828-account-create-update-xh7r6\" (UID: \"646de171-9e06-4df6-9e1e-07319c04b95c\") " pod="nova-kuttl-default/keystone-5828-account-create-update-xh7r6" Jan 27 08:09:34 crc kubenswrapper[4787]: I0127 08:09:34.420735 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8m6pj\" (UniqueName: \"kubernetes.io/projected/205e2c3c-82ef-4aaa-b1b3-0465a855bde9-kube-api-access-8m6pj\") pod \"keystone-db-create-wjmqh\" (UID: \"205e2c3c-82ef-4aaa-b1b3-0465a855bde9\") " pod="nova-kuttl-default/keystone-db-create-wjmqh" Jan 27 08:09:34 crc 
kubenswrapper[4787]: I0127 08:09:34.421358 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/205e2c3c-82ef-4aaa-b1b3-0465a855bde9-operator-scripts\") pod \"keystone-db-create-wjmqh\" (UID: \"205e2c3c-82ef-4aaa-b1b3-0465a855bde9\") " pod="nova-kuttl-default/keystone-db-create-wjmqh" Jan 27 08:09:34 crc kubenswrapper[4787]: I0127 08:09:34.445188 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8m6pj\" (UniqueName: \"kubernetes.io/projected/205e2c3c-82ef-4aaa-b1b3-0465a855bde9-kube-api-access-8m6pj\") pod \"keystone-db-create-wjmqh\" (UID: \"205e2c3c-82ef-4aaa-b1b3-0465a855bde9\") " pod="nova-kuttl-default/keystone-db-create-wjmqh" Jan 27 08:09:34 crc kubenswrapper[4787]: I0127 08:09:34.521344 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6mhst\" (UniqueName: \"kubernetes.io/projected/646de171-9e06-4df6-9e1e-07319c04b95c-kube-api-access-6mhst\") pod \"keystone-5828-account-create-update-xh7r6\" (UID: \"646de171-9e06-4df6-9e1e-07319c04b95c\") " pod="nova-kuttl-default/keystone-5828-account-create-update-xh7r6" Jan 27 08:09:34 crc kubenswrapper[4787]: I0127 08:09:34.521417 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/646de171-9e06-4df6-9e1e-07319c04b95c-operator-scripts\") pod \"keystone-5828-account-create-update-xh7r6\" (UID: \"646de171-9e06-4df6-9e1e-07319c04b95c\") " pod="nova-kuttl-default/keystone-5828-account-create-update-xh7r6" Jan 27 08:09:34 crc kubenswrapper[4787]: I0127 08:09:34.523239 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/646de171-9e06-4df6-9e1e-07319c04b95c-operator-scripts\") pod \"keystone-5828-account-create-update-xh7r6\" (UID: \"646de171-9e06-4df6-9e1e-07319c04b95c\") " pod="nova-kuttl-default/keystone-5828-account-create-update-xh7r6" Jan 27 08:09:34 crc kubenswrapper[4787]: I0127 08:09:34.540269 4787 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["nova-kuttl-default/placement-db-create-8rt5v"] Jan 27 08:09:34 crc kubenswrapper[4787]: I0127 08:09:34.542140 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/placement-db-create-8rt5v" Jan 27 08:09:34 crc kubenswrapper[4787]: I0127 08:09:34.553967 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/placement-db-create-8rt5v"] Jan 27 08:09:34 crc kubenswrapper[4787]: I0127 08:09:34.559588 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6mhst\" (UniqueName: \"kubernetes.io/projected/646de171-9e06-4df6-9e1e-07319c04b95c-kube-api-access-6mhst\") pod \"keystone-5828-account-create-update-xh7r6\" (UID: \"646de171-9e06-4df6-9e1e-07319c04b95c\") " pod="nova-kuttl-default/keystone-5828-account-create-update-xh7r6" Jan 27 08:09:34 crc kubenswrapper[4787]: I0127 08:09:34.610076 4787 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="nova-kuttl-default/keystone-db-create-wjmqh" Jan 27 08:09:34 crc kubenswrapper[4787]: I0127 08:09:34.622510 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-htzmc\" (UniqueName: \"kubernetes.io/projected/7ba9e1b4-f385-47f9-80f6-508a3406aa2a-kube-api-access-htzmc\") pod \"placement-db-create-8rt5v\" (UID: \"7ba9e1b4-f385-47f9-80f6-508a3406aa2a\") " pod="nova-kuttl-default/placement-db-create-8rt5v" Jan 27 08:09:34 crc kubenswrapper[4787]: I0127 08:09:34.622622 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7ba9e1b4-f385-47f9-80f6-508a3406aa2a-operator-scripts\") pod \"placement-db-create-8rt5v\" (UID: \"7ba9e1b4-f385-47f9-80f6-508a3406aa2a\") " pod="nova-kuttl-default/placement-db-create-8rt5v" Jan 27 08:09:34 crc kubenswrapper[4787]: I0127 08:09:34.640425 4787 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["nova-kuttl-default/placement-6d8d-account-create-update-5jvm9"] Jan 27 08:09:34 crc kubenswrapper[4787]: I0127 08:09:34.641701 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/placement-6d8d-account-create-update-5jvm9" Jan 27 08:09:34 crc kubenswrapper[4787]: I0127 08:09:34.646836 4787 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"placement-db-secret" Jan 27 08:09:34 crc kubenswrapper[4787]: I0127 08:09:34.652506 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/placement-6d8d-account-create-update-5jvm9"] Jan 27 08:09:34 crc kubenswrapper[4787]: I0127 08:09:34.664895 4787 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="nova-kuttl-default/keystone-5828-account-create-update-xh7r6" Jan 27 08:09:34 crc kubenswrapper[4787]: I0127 08:09:34.724093 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-htzmc\" (UniqueName: \"kubernetes.io/projected/7ba9e1b4-f385-47f9-80f6-508a3406aa2a-kube-api-access-htzmc\") pod \"placement-db-create-8rt5v\" (UID: \"7ba9e1b4-f385-47f9-80f6-508a3406aa2a\") " pod="nova-kuttl-default/placement-db-create-8rt5v" Jan 27 08:09:34 crc kubenswrapper[4787]: I0127 08:09:34.724211 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7ba9e1b4-f385-47f9-80f6-508a3406aa2a-operator-scripts\") pod \"placement-db-create-8rt5v\" (UID: \"7ba9e1b4-f385-47f9-80f6-508a3406aa2a\") " pod="nova-kuttl-default/placement-db-create-8rt5v" Jan 27 08:09:34 crc kubenswrapper[4787]: I0127 08:09:34.725231 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7ba9e1b4-f385-47f9-80f6-508a3406aa2a-operator-scripts\") pod \"placement-db-create-8rt5v\" (UID: \"7ba9e1b4-f385-47f9-80f6-508a3406aa2a\") " pod="nova-kuttl-default/placement-db-create-8rt5v" Jan 27 08:09:34 crc kubenswrapper[4787]: I0127 08:09:34.752913 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-htzmc\" (UniqueName: \"kubernetes.io/projected/7ba9e1b4-f385-47f9-80f6-508a3406aa2a-kube-api-access-htzmc\") pod \"placement-db-create-8rt5v\" (UID: \"7ba9e1b4-f385-47f9-80f6-508a3406aa2a\") " pod="nova-kuttl-default/placement-db-create-8rt5v" Jan 27 08:09:34 crc kubenswrapper[4787]: I0127 08:09:34.828020 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2e3d75aa-1156-48f5-9435-b8b3cd2ec555-operator-scripts\") pod \"placement-6d8d-account-create-update-5jvm9\" (UID: \"2e3d75aa-1156-48f5-9435-b8b3cd2ec555\") " pod="nova-kuttl-default/placement-6d8d-account-create-update-5jvm9" Jan 27 08:09:34 crc kubenswrapper[4787]: I0127 08:09:34.828194 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-49fcz\" (UniqueName: \"kubernetes.io/projected/2e3d75aa-1156-48f5-9435-b8b3cd2ec555-kube-api-access-49fcz\") pod \"placement-6d8d-account-create-update-5jvm9\" (UID: \"2e3d75aa-1156-48f5-9435-b8b3cd2ec555\") " pod="nova-kuttl-default/placement-6d8d-account-create-update-5jvm9" Jan 27 08:09:34 crc kubenswrapper[4787]: I0127 08:09:34.894400 4787 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="nova-kuttl-default/placement-db-create-8rt5v" Jan 27 08:09:34 crc kubenswrapper[4787]: I0127 08:09:34.935658 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-49fcz\" (UniqueName: \"kubernetes.io/projected/2e3d75aa-1156-48f5-9435-b8b3cd2ec555-kube-api-access-49fcz\") pod \"placement-6d8d-account-create-update-5jvm9\" (UID: \"2e3d75aa-1156-48f5-9435-b8b3cd2ec555\") " pod="nova-kuttl-default/placement-6d8d-account-create-update-5jvm9" Jan 27 08:09:34 crc kubenswrapper[4787]: I0127 08:09:34.935753 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2e3d75aa-1156-48f5-9435-b8b3cd2ec555-operator-scripts\") pod \"placement-6d8d-account-create-update-5jvm9\" (UID: \"2e3d75aa-1156-48f5-9435-b8b3cd2ec555\") " pod="nova-kuttl-default/placement-6d8d-account-create-update-5jvm9" Jan 27 08:09:34 crc kubenswrapper[4787]: I0127 08:09:34.937258 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2e3d75aa-1156-48f5-9435-b8b3cd2ec555-operator-scripts\") pod \"placement-6d8d-account-create-update-5jvm9\" (UID: \"2e3d75aa-1156-48f5-9435-b8b3cd2ec555\") " pod="nova-kuttl-default/placement-6d8d-account-create-update-5jvm9" Jan 27 08:09:34 crc kubenswrapper[4787]: I0127 08:09:34.958133 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-49fcz\" (UniqueName: \"kubernetes.io/projected/2e3d75aa-1156-48f5-9435-b8b3cd2ec555-kube-api-access-49fcz\") pod \"placement-6d8d-account-create-update-5jvm9\" (UID: \"2e3d75aa-1156-48f5-9435-b8b3cd2ec555\") " pod="nova-kuttl-default/placement-6d8d-account-create-update-5jvm9" Jan 27 08:09:35 crc kubenswrapper[4787]: I0127 08:09:35.041368 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/keystone-5828-account-create-update-xh7r6"] Jan 27 08:09:35 crc kubenswrapper[4787]: W0127 08:09:35.043005 4787 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod646de171_9e06_4df6_9e1e_07319c04b95c.slice/crio-61ebf61c0ee28539578df85a272d783dc4becc606055385faa63ee88cef6d0cf WatchSource:0}: Error finding container 61ebf61c0ee28539578df85a272d783dc4becc606055385faa63ee88cef6d0cf: Status 404 returned error can't find the container with id 61ebf61c0ee28539578df85a272d783dc4becc606055385faa63ee88cef6d0cf Jan 27 08:09:35 crc kubenswrapper[4787]: I0127 08:09:35.047265 4787 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="nova-kuttl-default/placement-6d8d-account-create-update-5jvm9" Jan 27 08:09:35 crc kubenswrapper[4787]: I0127 08:09:35.071823 4787 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"keystone-db-secret" Jan 27 08:09:35 crc kubenswrapper[4787]: I0127 08:09:35.107023 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/keystone-db-create-wjmqh"] Jan 27 08:09:35 crc kubenswrapper[4787]: W0127 08:09:35.163283 4787 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod205e2c3c_82ef_4aaa_b1b3_0465a855bde9.slice/crio-3693ebfab339d06c27513a356c82a28e6e678e9480e3c3b9fac9bc0c2d5ed91b WatchSource:0}: Error finding container 3693ebfab339d06c27513a356c82a28e6e678e9480e3c3b9fac9bc0c2d5ed91b: Status 404 returned error can't find the container with id 3693ebfab339d06c27513a356c82a28e6e678e9480e3c3b9fac9bc0c2d5ed91b Jan 27 08:09:35 crc kubenswrapper[4787]: I0127 08:09:35.164057 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/keystone-5828-account-create-update-xh7r6" event={"ID":"646de171-9e06-4df6-9e1e-07319c04b95c","Type":"ContainerStarted","Data":"61ebf61c0ee28539578df85a272d783dc4becc606055385faa63ee88cef6d0cf"} Jan 27 08:09:35 crc kubenswrapper[4787]: I0127 08:09:35.171076 4787 generic.go:334] "Generic (PLEG): container finished" podID="c3150913-c395-4ed0-a5d3-616426f58671" containerID="b5b33f88492ce6991e1e36a7024a76b4049def405dfe4dc15a1159b39d6f6633" exitCode=0 Jan 27 08:09:35 crc kubenswrapper[4787]: I0127 08:09:35.171139 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/root-account-create-update-b4lsd" event={"ID":"c3150913-c395-4ed0-a5d3-616426f58671","Type":"ContainerDied","Data":"b5b33f88492ce6991e1e36a7024a76b4049def405dfe4dc15a1159b39d6f6633"} Jan 27 08:09:35 crc kubenswrapper[4787]: I0127 08:09:35.344049 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/placement-db-create-8rt5v"] Jan 27 08:09:35 crc kubenswrapper[4787]: W0127 08:09:35.347317 4787 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7ba9e1b4_f385_47f9_80f6_508a3406aa2a.slice/crio-644b272208b8e1b224dfece73de7f0ce1f7d4fb488f30fb14df01a26475a730e WatchSource:0}: Error finding container 644b272208b8e1b224dfece73de7f0ce1f7d4fb488f30fb14df01a26475a730e: Status 404 returned error can't find the container with id 644b272208b8e1b224dfece73de7f0ce1f7d4fb488f30fb14df01a26475a730e Jan 27 08:09:35 crc kubenswrapper[4787]: I0127 08:09:35.518253 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/placement-6d8d-account-create-update-5jvm9"] Jan 27 08:09:35 crc kubenswrapper[4787]: I0127 08:09:35.530024 4787 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"placement-db-secret" Jan 27 08:09:36 crc kubenswrapper[4787]: I0127 08:09:36.186791 4787 generic.go:334] "Generic (PLEG): container finished" podID="7ba9e1b4-f385-47f9-80f6-508a3406aa2a" containerID="9ab563345b762a72fbe3b47df8d23be9db6758acc4bcc324921200a4b4455e30" exitCode=0 Jan 27 08:09:36 crc kubenswrapper[4787]: I0127 08:09:36.186901 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/placement-db-create-8rt5v" event={"ID":"7ba9e1b4-f385-47f9-80f6-508a3406aa2a","Type":"ContainerDied","Data":"9ab563345b762a72fbe3b47df8d23be9db6758acc4bcc324921200a4b4455e30"} Jan 27 08:09:36 
crc kubenswrapper[4787]: I0127 08:09:36.186946 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/placement-db-create-8rt5v" event={"ID":"7ba9e1b4-f385-47f9-80f6-508a3406aa2a","Type":"ContainerStarted","Data":"644b272208b8e1b224dfece73de7f0ce1f7d4fb488f30fb14df01a26475a730e"} Jan 27 08:09:36 crc kubenswrapper[4787]: I0127 08:09:36.188877 4787 generic.go:334] "Generic (PLEG): container finished" podID="646de171-9e06-4df6-9e1e-07319c04b95c" containerID="a478f74f2f9db06c59e7ba51e2be735e9b9fb9d652017f85e3a9daea1ac8536e" exitCode=0 Jan 27 08:09:36 crc kubenswrapper[4787]: I0127 08:09:36.188995 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/keystone-5828-account-create-update-xh7r6" event={"ID":"646de171-9e06-4df6-9e1e-07319c04b95c","Type":"ContainerDied","Data":"a478f74f2f9db06c59e7ba51e2be735e9b9fb9d652017f85e3a9daea1ac8536e"} Jan 27 08:09:36 crc kubenswrapper[4787]: I0127 08:09:36.191136 4787 generic.go:334] "Generic (PLEG): container finished" podID="205e2c3c-82ef-4aaa-b1b3-0465a855bde9" containerID="2e3468c57e9613d84bee598f4f0c5a4b7e44d892a408fe4d11cddeffe9d5b60b" exitCode=0 Jan 27 08:09:36 crc kubenswrapper[4787]: I0127 08:09:36.191175 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/keystone-db-create-wjmqh" event={"ID":"205e2c3c-82ef-4aaa-b1b3-0465a855bde9","Type":"ContainerDied","Data":"2e3468c57e9613d84bee598f4f0c5a4b7e44d892a408fe4d11cddeffe9d5b60b"} Jan 27 08:09:36 crc kubenswrapper[4787]: I0127 08:09:36.191222 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/keystone-db-create-wjmqh" event={"ID":"205e2c3c-82ef-4aaa-b1b3-0465a855bde9","Type":"ContainerStarted","Data":"3693ebfab339d06c27513a356c82a28e6e678e9480e3c3b9fac9bc0c2d5ed91b"} Jan 27 08:09:36 crc kubenswrapper[4787]: I0127 08:09:36.192925 4787 generic.go:334] "Generic (PLEG): container finished" podID="2e3d75aa-1156-48f5-9435-b8b3cd2ec555" containerID="3f06a186b769d4a1f5e238fd19184a35781ff17d23f2779b2c34abe34bf41b44" exitCode=0 Jan 27 08:09:36 crc kubenswrapper[4787]: I0127 08:09:36.192952 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/placement-6d8d-account-create-update-5jvm9" event={"ID":"2e3d75aa-1156-48f5-9435-b8b3cd2ec555","Type":"ContainerDied","Data":"3f06a186b769d4a1f5e238fd19184a35781ff17d23f2779b2c34abe34bf41b44"} Jan 27 08:09:36 crc kubenswrapper[4787]: I0127 08:09:36.192987 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/placement-6d8d-account-create-update-5jvm9" event={"ID":"2e3d75aa-1156-48f5-9435-b8b3cd2ec555","Type":"ContainerStarted","Data":"b404522cda13cd80426b31e5ac61fc611fc37a31277fcf44a25db1d76987c6f3"} Jan 27 08:09:36 crc kubenswrapper[4787]: I0127 08:09:36.523195 4787 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="nova-kuttl-default/root-account-create-update-b4lsd" Jan 27 08:09:36 crc kubenswrapper[4787]: I0127 08:09:36.578934 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-f2cg2\" (UniqueName: \"kubernetes.io/projected/c3150913-c395-4ed0-a5d3-616426f58671-kube-api-access-f2cg2\") pod \"c3150913-c395-4ed0-a5d3-616426f58671\" (UID: \"c3150913-c395-4ed0-a5d3-616426f58671\") " Jan 27 08:09:36 crc kubenswrapper[4787]: I0127 08:09:36.579076 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c3150913-c395-4ed0-a5d3-616426f58671-operator-scripts\") pod \"c3150913-c395-4ed0-a5d3-616426f58671\" (UID: \"c3150913-c395-4ed0-a5d3-616426f58671\") " Jan 27 08:09:36 crc kubenswrapper[4787]: I0127 08:09:36.580485 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c3150913-c395-4ed0-a5d3-616426f58671-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "c3150913-c395-4ed0-a5d3-616426f58671" (UID: "c3150913-c395-4ed0-a5d3-616426f58671"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 27 08:09:36 crc kubenswrapper[4787]: I0127 08:09:36.588143 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c3150913-c395-4ed0-a5d3-616426f58671-kube-api-access-f2cg2" (OuterVolumeSpecName: "kube-api-access-f2cg2") pod "c3150913-c395-4ed0-a5d3-616426f58671" (UID: "c3150913-c395-4ed0-a5d3-616426f58671"). InnerVolumeSpecName "kube-api-access-f2cg2". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 27 08:09:36 crc kubenswrapper[4787]: I0127 08:09:36.682961 4787 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-f2cg2\" (UniqueName: \"kubernetes.io/projected/c3150913-c395-4ed0-a5d3-616426f58671-kube-api-access-f2cg2\") on node \"crc\" DevicePath \"\"" Jan 27 08:09:36 crc kubenswrapper[4787]: I0127 08:09:36.683024 4787 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c3150913-c395-4ed0-a5d3-616426f58671-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 27 08:09:37 crc kubenswrapper[4787]: I0127 08:09:37.204163 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/root-account-create-update-b4lsd" event={"ID":"c3150913-c395-4ed0-a5d3-616426f58671","Type":"ContainerDied","Data":"a7da237db8236aafbc9b1a0633c16718a4e8ac2260b89cb17571476da9604d22"} Jan 27 08:09:37 crc kubenswrapper[4787]: I0127 08:09:37.204233 4787 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a7da237db8236aafbc9b1a0633c16718a4e8ac2260b89cb17571476da9604d22" Jan 27 08:09:37 crc kubenswrapper[4787]: I0127 08:09:37.204567 4787 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/root-account-create-update-b4lsd" Jan 27 08:09:37 crc kubenswrapper[4787]: I0127 08:09:37.597793 4787 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="nova-kuttl-default/placement-6d8d-account-create-update-5jvm9" Jan 27 08:09:37 crc kubenswrapper[4787]: I0127 08:09:37.626439 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-49fcz\" (UniqueName: \"kubernetes.io/projected/2e3d75aa-1156-48f5-9435-b8b3cd2ec555-kube-api-access-49fcz\") pod \"2e3d75aa-1156-48f5-9435-b8b3cd2ec555\" (UID: \"2e3d75aa-1156-48f5-9435-b8b3cd2ec555\") " Jan 27 08:09:37 crc kubenswrapper[4787]: I0127 08:09:37.626648 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2e3d75aa-1156-48f5-9435-b8b3cd2ec555-operator-scripts\") pod \"2e3d75aa-1156-48f5-9435-b8b3cd2ec555\" (UID: \"2e3d75aa-1156-48f5-9435-b8b3cd2ec555\") " Jan 27 08:09:37 crc kubenswrapper[4787]: I0127 08:09:37.628031 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2e3d75aa-1156-48f5-9435-b8b3cd2ec555-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "2e3d75aa-1156-48f5-9435-b8b3cd2ec555" (UID: "2e3d75aa-1156-48f5-9435-b8b3cd2ec555"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 27 08:09:37 crc kubenswrapper[4787]: I0127 08:09:37.635433 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2e3d75aa-1156-48f5-9435-b8b3cd2ec555-kube-api-access-49fcz" (OuterVolumeSpecName: "kube-api-access-49fcz") pod "2e3d75aa-1156-48f5-9435-b8b3cd2ec555" (UID: "2e3d75aa-1156-48f5-9435-b8b3cd2ec555"). InnerVolumeSpecName "kube-api-access-49fcz". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 27 08:09:37 crc kubenswrapper[4787]: I0127 08:09:37.727964 4787 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-49fcz\" (UniqueName: \"kubernetes.io/projected/2e3d75aa-1156-48f5-9435-b8b3cd2ec555-kube-api-access-49fcz\") on node \"crc\" DevicePath \"\"" Jan 27 08:09:37 crc kubenswrapper[4787]: I0127 08:09:37.728010 4787 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2e3d75aa-1156-48f5-9435-b8b3cd2ec555-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 27 08:09:37 crc kubenswrapper[4787]: I0127 08:09:37.735155 4787 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/keystone-5828-account-create-update-xh7r6" Jan 27 08:09:37 crc kubenswrapper[4787]: I0127 08:09:37.743474 4787 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/placement-db-create-8rt5v" Jan 27 08:09:37 crc kubenswrapper[4787]: I0127 08:09:37.761765 4787 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="nova-kuttl-default/keystone-db-create-wjmqh" Jan 27 08:09:37 crc kubenswrapper[4787]: I0127 08:09:37.830202 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8m6pj\" (UniqueName: \"kubernetes.io/projected/205e2c3c-82ef-4aaa-b1b3-0465a855bde9-kube-api-access-8m6pj\") pod \"205e2c3c-82ef-4aaa-b1b3-0465a855bde9\" (UID: \"205e2c3c-82ef-4aaa-b1b3-0465a855bde9\") " Jan 27 08:09:37 crc kubenswrapper[4787]: I0127 08:09:37.830354 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/205e2c3c-82ef-4aaa-b1b3-0465a855bde9-operator-scripts\") pod \"205e2c3c-82ef-4aaa-b1b3-0465a855bde9\" (UID: \"205e2c3c-82ef-4aaa-b1b3-0465a855bde9\") " Jan 27 08:09:37 crc kubenswrapper[4787]: I0127 08:09:37.830393 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/646de171-9e06-4df6-9e1e-07319c04b95c-operator-scripts\") pod \"646de171-9e06-4df6-9e1e-07319c04b95c\" (UID: \"646de171-9e06-4df6-9e1e-07319c04b95c\") " Jan 27 08:09:37 crc kubenswrapper[4787]: I0127 08:09:37.830460 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-htzmc\" (UniqueName: \"kubernetes.io/projected/7ba9e1b4-f385-47f9-80f6-508a3406aa2a-kube-api-access-htzmc\") pod \"7ba9e1b4-f385-47f9-80f6-508a3406aa2a\" (UID: \"7ba9e1b4-f385-47f9-80f6-508a3406aa2a\") " Jan 27 08:09:37 crc kubenswrapper[4787]: I0127 08:09:37.830501 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6mhst\" (UniqueName: \"kubernetes.io/projected/646de171-9e06-4df6-9e1e-07319c04b95c-kube-api-access-6mhst\") pod \"646de171-9e06-4df6-9e1e-07319c04b95c\" (UID: \"646de171-9e06-4df6-9e1e-07319c04b95c\") " Jan 27 08:09:37 crc kubenswrapper[4787]: I0127 08:09:37.830581 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7ba9e1b4-f385-47f9-80f6-508a3406aa2a-operator-scripts\") pod \"7ba9e1b4-f385-47f9-80f6-508a3406aa2a\" (UID: \"7ba9e1b4-f385-47f9-80f6-508a3406aa2a\") " Jan 27 08:09:37 crc kubenswrapper[4787]: I0127 08:09:37.831438 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/205e2c3c-82ef-4aaa-b1b3-0465a855bde9-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "205e2c3c-82ef-4aaa-b1b3-0465a855bde9" (UID: "205e2c3c-82ef-4aaa-b1b3-0465a855bde9"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 27 08:09:37 crc kubenswrapper[4787]: I0127 08:09:37.831483 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/646de171-9e06-4df6-9e1e-07319c04b95c-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "646de171-9e06-4df6-9e1e-07319c04b95c" (UID: "646de171-9e06-4df6-9e1e-07319c04b95c"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 27 08:09:37 crc kubenswrapper[4787]: I0127 08:09:37.831687 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7ba9e1b4-f385-47f9-80f6-508a3406aa2a-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "7ba9e1b4-f385-47f9-80f6-508a3406aa2a" (UID: "7ba9e1b4-f385-47f9-80f6-508a3406aa2a"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 27 08:09:37 crc kubenswrapper[4787]: I0127 08:09:37.834635 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/205e2c3c-82ef-4aaa-b1b3-0465a855bde9-kube-api-access-8m6pj" (OuterVolumeSpecName: "kube-api-access-8m6pj") pod "205e2c3c-82ef-4aaa-b1b3-0465a855bde9" (UID: "205e2c3c-82ef-4aaa-b1b3-0465a855bde9"). InnerVolumeSpecName "kube-api-access-8m6pj". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 27 08:09:37 crc kubenswrapper[4787]: I0127 08:09:37.834869 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/646de171-9e06-4df6-9e1e-07319c04b95c-kube-api-access-6mhst" (OuterVolumeSpecName: "kube-api-access-6mhst") pod "646de171-9e06-4df6-9e1e-07319c04b95c" (UID: "646de171-9e06-4df6-9e1e-07319c04b95c"). InnerVolumeSpecName "kube-api-access-6mhst". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 27 08:09:37 crc kubenswrapper[4787]: I0127 08:09:37.834938 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7ba9e1b4-f385-47f9-80f6-508a3406aa2a-kube-api-access-htzmc" (OuterVolumeSpecName: "kube-api-access-htzmc") pod "7ba9e1b4-f385-47f9-80f6-508a3406aa2a" (UID: "7ba9e1b4-f385-47f9-80f6-508a3406aa2a"). InnerVolumeSpecName "kube-api-access-htzmc". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 27 08:09:37 crc kubenswrapper[4787]: I0127 08:09:37.932819 4787 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-htzmc\" (UniqueName: \"kubernetes.io/projected/7ba9e1b4-f385-47f9-80f6-508a3406aa2a-kube-api-access-htzmc\") on node \"crc\" DevicePath \"\"" Jan 27 08:09:37 crc kubenswrapper[4787]: I0127 08:09:37.932868 4787 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6mhst\" (UniqueName: \"kubernetes.io/projected/646de171-9e06-4df6-9e1e-07319c04b95c-kube-api-access-6mhst\") on node \"crc\" DevicePath \"\"" Jan 27 08:09:37 crc kubenswrapper[4787]: I0127 08:09:37.932879 4787 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7ba9e1b4-f385-47f9-80f6-508a3406aa2a-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 27 08:09:37 crc kubenswrapper[4787]: I0127 08:09:37.932891 4787 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8m6pj\" (UniqueName: \"kubernetes.io/projected/205e2c3c-82ef-4aaa-b1b3-0465a855bde9-kube-api-access-8m6pj\") on node \"crc\" DevicePath \"\"" Jan 27 08:09:37 crc kubenswrapper[4787]: I0127 08:09:37.932907 4787 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/205e2c3c-82ef-4aaa-b1b3-0465a855bde9-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 27 08:09:37 crc kubenswrapper[4787]: I0127 08:09:37.932918 4787 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/646de171-9e06-4df6-9e1e-07319c04b95c-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 27 08:09:38 crc kubenswrapper[4787]: I0127 08:09:38.213868 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/placement-db-create-8rt5v" event={"ID":"7ba9e1b4-f385-47f9-80f6-508a3406aa2a","Type":"ContainerDied","Data":"644b272208b8e1b224dfece73de7f0ce1f7d4fb488f30fb14df01a26475a730e"} Jan 27 08:09:38 crc kubenswrapper[4787]: I0127 08:09:38.213922 4787 pod_container_deletor.go:80] "Container not found in pod's 
containers" containerID="644b272208b8e1b224dfece73de7f0ce1f7d4fb488f30fb14df01a26475a730e" Jan 27 08:09:38 crc kubenswrapper[4787]: I0127 08:09:38.213950 4787 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/placement-db-create-8rt5v" Jan 27 08:09:38 crc kubenswrapper[4787]: I0127 08:09:38.216437 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/keystone-5828-account-create-update-xh7r6" event={"ID":"646de171-9e06-4df6-9e1e-07319c04b95c","Type":"ContainerDied","Data":"61ebf61c0ee28539578df85a272d783dc4becc606055385faa63ee88cef6d0cf"} Jan 27 08:09:38 crc kubenswrapper[4787]: I0127 08:09:38.216474 4787 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/keystone-5828-account-create-update-xh7r6" Jan 27 08:09:38 crc kubenswrapper[4787]: I0127 08:09:38.216494 4787 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="61ebf61c0ee28539578df85a272d783dc4becc606055385faa63ee88cef6d0cf" Jan 27 08:09:38 crc kubenswrapper[4787]: I0127 08:09:38.223204 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/keystone-db-create-wjmqh" event={"ID":"205e2c3c-82ef-4aaa-b1b3-0465a855bde9","Type":"ContainerDied","Data":"3693ebfab339d06c27513a356c82a28e6e678e9480e3c3b9fac9bc0c2d5ed91b"} Jan 27 08:09:38 crc kubenswrapper[4787]: I0127 08:09:38.223277 4787 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3693ebfab339d06c27513a356c82a28e6e678e9480e3c3b9fac9bc0c2d5ed91b" Jan 27 08:09:38 crc kubenswrapper[4787]: I0127 08:09:38.223420 4787 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/keystone-db-create-wjmqh" Jan 27 08:09:38 crc kubenswrapper[4787]: I0127 08:09:38.229401 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/placement-6d8d-account-create-update-5jvm9" event={"ID":"2e3d75aa-1156-48f5-9435-b8b3cd2ec555","Type":"ContainerDied","Data":"b404522cda13cd80426b31e5ac61fc611fc37a31277fcf44a25db1d76987c6f3"} Jan 27 08:09:38 crc kubenswrapper[4787]: I0127 08:09:38.229491 4787 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b404522cda13cd80426b31e5ac61fc611fc37a31277fcf44a25db1d76987c6f3" Jan 27 08:09:38 crc kubenswrapper[4787]: I0127 08:09:38.229656 4787 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="nova-kuttl-default/placement-6d8d-account-create-update-5jvm9" Jan 27 08:09:42 crc kubenswrapper[4787]: I0127 08:09:42.261879 4787 generic.go:334] "Generic (PLEG): container finished" podID="f84bfff4-b4f2-40d8-8b81-a9e5eb776442" containerID="b35fd8bea3d2d9361614b10dbc01f3a16983ab9d5a38a13fd4df6390c81bb921" exitCode=0 Jan 27 08:09:42 crc kubenswrapper[4787]: I0127 08:09:42.262068 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/rabbitmq-server-0" event={"ID":"f84bfff4-b4f2-40d8-8b81-a9e5eb776442","Type":"ContainerDied","Data":"b35fd8bea3d2d9361614b10dbc01f3a16983ab9d5a38a13fd4df6390c81bb921"} Jan 27 08:09:42 crc kubenswrapper[4787]: I0127 08:09:42.268575 4787 generic.go:334] "Generic (PLEG): container finished" podID="1d9a6f95-2510-4e74-b4f7-fb592d761c91" containerID="013b2f12459c08307712f9e501100fc81d14973b010bcdc9c24006a8c2afc6b8" exitCode=0 Jan 27 08:09:42 crc kubenswrapper[4787]: I0127 08:09:42.268645 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/rabbitmq-notifications-server-0" event={"ID":"1d9a6f95-2510-4e74-b4f7-fb592d761c91","Type":"ContainerDied","Data":"013b2f12459c08307712f9e501100fc81d14973b010bcdc9c24006a8c2afc6b8"} Jan 27 08:09:43 crc kubenswrapper[4787]: I0127 08:09:43.279309 4787 generic.go:334] "Generic (PLEG): container finished" podID="faff0a15-880a-4cf7-a0e0-81d573ace274" containerID="e5125bf2a44027a200b519f739fdd0eef6bd684deedf878b3831835beda70a71" exitCode=0 Jan 27 08:09:43 crc kubenswrapper[4787]: I0127 08:09:43.279398 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/rabbitmq-cell1-server-0" event={"ID":"faff0a15-880a-4cf7-a0e0-81d573ace274","Type":"ContainerDied","Data":"e5125bf2a44027a200b519f739fdd0eef6bd684deedf878b3831835beda70a71"} Jan 27 08:09:43 crc kubenswrapper[4787]: I0127 08:09:43.289460 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/rabbitmq-notifications-server-0" event={"ID":"1d9a6f95-2510-4e74-b4f7-fb592d761c91","Type":"ContainerStarted","Data":"41a8905f95161f2a326d5047b8d1d059604e94f798b145536a28b7152f8c2dae"} Jan 27 08:09:43 crc kubenswrapper[4787]: I0127 08:09:43.290689 4787 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="nova-kuttl-default/rabbitmq-notifications-server-0" Jan 27 08:09:43 crc kubenswrapper[4787]: I0127 08:09:43.297212 4787 generic.go:334] "Generic (PLEG): container finished" podID="68be2504-1de8-427c-9937-bd0428f7c5c4" containerID="60be7c927e866796541fb6c53b2b04db37adbfe1b02bf03938d89ea5c5370982" exitCode=0 Jan 27 08:09:43 crc kubenswrapper[4787]: I0127 08:09:43.297326 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/rabbitmq-broadcaster-server-0" event={"ID":"68be2504-1de8-427c-9937-bd0428f7c5c4","Type":"ContainerDied","Data":"60be7c927e866796541fb6c53b2b04db37adbfe1b02bf03938d89ea5c5370982"} Jan 27 08:09:43 crc kubenswrapper[4787]: I0127 08:09:43.301460 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/rabbitmq-server-0" event={"ID":"f84bfff4-b4f2-40d8-8b81-a9e5eb776442","Type":"ContainerStarted","Data":"61897f0c250cb27f30a8bbf48aae1210700d43e93c5aad704d626be0a37a1f7e"} Jan 27 08:09:43 crc kubenswrapper[4787]: I0127 08:09:43.302333 4787 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="nova-kuttl-default/rabbitmq-server-0" Jan 27 08:09:43 crc kubenswrapper[4787]: I0127 08:09:43.344312 4787 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="nova-kuttl-default/rabbitmq-server-0" podStartSLOduration=38.044440905 podStartE2EDuration="52.34428576s" podCreationTimestamp="2026-01-27 08:08:51 +0000 UTC" firstStartedPulling="2026-01-27 08:08:53.552885947 +0000 UTC m=+1039.205241429" lastFinishedPulling="2026-01-27 08:09:07.852730792 +0000 UTC m=+1053.505086284" observedRunningTime="2026-01-27 08:09:43.336260464 +0000 UTC m=+1088.988615976" watchObservedRunningTime="2026-01-27 08:09:43.34428576 +0000 UTC m=+1088.996641312" Jan 27 08:09:43 crc kubenswrapper[4787]: I0127 08:09:43.398487 4787 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="nova-kuttl-default/rabbitmq-notifications-server-0" podStartSLOduration=38.174741592 podStartE2EDuration="51.398453762s" podCreationTimestamp="2026-01-27 08:08:52 +0000 UTC" firstStartedPulling="2026-01-27 08:08:54.675043597 +0000 UTC m=+1040.327399089" lastFinishedPulling="2026-01-27 08:09:07.898755767 +0000 UTC m=+1053.551111259" observedRunningTime="2026-01-27 08:09:43.394397118 +0000 UTC m=+1089.046752620" watchObservedRunningTime="2026-01-27 08:09:43.398453762 +0000 UTC m=+1089.050809254" Jan 27 08:09:44 crc kubenswrapper[4787]: I0127 08:09:44.316536 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/rabbitmq-cell1-server-0" event={"ID":"faff0a15-880a-4cf7-a0e0-81d573ace274","Type":"ContainerStarted","Data":"912d2fdf7d631c89ca941a924532475a7eb85dfa59356b1adf65daa134ee919f"} Jan 27 08:09:44 crc kubenswrapper[4787]: I0127 08:09:44.319865 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/rabbitmq-broadcaster-server-0" event={"ID":"68be2504-1de8-427c-9937-bd0428f7c5c4","Type":"ContainerStarted","Data":"86d9ea0f6998d29e24154814c190683b5445a6bb1e46dece11aa76d82707ed56"} Jan 27 08:09:44 crc kubenswrapper[4787]: I0127 08:09:44.348633 4787 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="nova-kuttl-default/rabbitmq-cell1-server-0" podStartSLOduration=38.661062162 podStartE2EDuration="52.348611863s" podCreationTimestamp="2026-01-27 08:08:52 +0000 UTC" firstStartedPulling="2026-01-27 08:08:54.167359451 +0000 UTC m=+1039.819714943" lastFinishedPulling="2026-01-27 08:09:07.854909152 +0000 UTC m=+1053.507264644" observedRunningTime="2026-01-27 08:09:44.339574714 +0000 UTC m=+1089.991930236" watchObservedRunningTime="2026-01-27 08:09:44.348611863 +0000 UTC m=+1090.000967355" Jan 27 08:09:44 crc kubenswrapper[4787]: I0127 08:09:44.369768 4787 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="nova-kuttl-default/rabbitmq-broadcaster-server-0" podStartSLOduration=39.198435332 podStartE2EDuration="53.369739432s" podCreationTimestamp="2026-01-27 08:08:51 +0000 UTC" firstStartedPulling="2026-01-27 08:08:53.655707558 +0000 UTC m=+1039.308063050" lastFinishedPulling="2026-01-27 08:09:07.827011658 +0000 UTC m=+1053.479367150" observedRunningTime="2026-01-27 08:09:44.365186917 +0000 UTC m=+1090.017542419" watchObservedRunningTime="2026-01-27 08:09:44.369739432 +0000 UTC m=+1090.022094924" Jan 27 08:09:53 crc kubenswrapper[4787]: I0127 08:09:53.094785 4787 prober.go:107] "Probe failed" probeType="Readiness" pod="nova-kuttl-default/rabbitmq-server-0" podUID="f84bfff4-b4f2-40d8-8b81-a9e5eb776442" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.95:5672: connect: connection refused" Jan 27 08:09:53 crc kubenswrapper[4787]: I0127 08:09:53.383437 4787 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="nova-kuttl-default/rabbitmq-broadcaster-server-0" Jan 
27 08:09:53 crc kubenswrapper[4787]: I0127 08:09:53.384683 4787 prober.go:107] "Probe failed" probeType="Readiness" pod="nova-kuttl-default/rabbitmq-broadcaster-server-0" podUID="68be2504-1de8-427c-9937-bd0428f7c5c4" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.96:5672: connect: connection refused" Jan 27 08:09:53 crc kubenswrapper[4787]: I0127 08:09:53.385376 4787 prober.go:107] "Probe failed" probeType="Readiness" pod="nova-kuttl-default/rabbitmq-broadcaster-server-0" podUID="68be2504-1de8-427c-9937-bd0428f7c5c4" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.96:5672: connect: connection refused" Jan 27 08:09:53 crc kubenswrapper[4787]: I0127 08:09:53.689625 4787 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="nova-kuttl-default/rabbitmq-cell1-server-0" Jan 27 08:09:53 crc kubenswrapper[4787]: I0127 08:09:53.691534 4787 prober.go:107] "Probe failed" probeType="Readiness" pod="nova-kuttl-default/rabbitmq-cell1-server-0" podUID="faff0a15-880a-4cf7-a0e0-81d573ace274" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.97:5672: connect: connection refused" Jan 27 08:09:53 crc kubenswrapper[4787]: I0127 08:09:53.691936 4787 prober.go:107] "Probe failed" probeType="Readiness" pod="nova-kuttl-default/rabbitmq-cell1-server-0" podUID="faff0a15-880a-4cf7-a0e0-81d573ace274" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.97:5672: connect: connection refused" Jan 27 08:09:54 crc kubenswrapper[4787]: I0127 08:09:54.039733 4787 prober.go:107] "Probe failed" probeType="Readiness" pod="nova-kuttl-default/rabbitmq-notifications-server-0" podUID="1d9a6f95-2510-4e74-b4f7-fb592d761c91" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.98:5672: connect: connection refused" Jan 27 08:10:03 crc kubenswrapper[4787]: I0127 08:10:03.093519 4787 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="nova-kuttl-default/rabbitmq-server-0" Jan 27 08:10:03 crc kubenswrapper[4787]: I0127 08:10:03.385776 4787 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="nova-kuttl-default/rabbitmq-broadcaster-server-0" Jan 27 08:10:03 crc kubenswrapper[4787]: I0127 08:10:03.690738 4787 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="nova-kuttl-default/rabbitmq-cell1-server-0" Jan 27 08:10:03 crc kubenswrapper[4787]: I0127 08:10:03.777391 4787 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["nova-kuttl-default/keystone-db-sync-ztn8q"] Jan 27 08:10:03 crc kubenswrapper[4787]: E0127 08:10:03.777892 4787 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2e3d75aa-1156-48f5-9435-b8b3cd2ec555" containerName="mariadb-account-create-update" Jan 27 08:10:03 crc kubenswrapper[4787]: I0127 08:10:03.777919 4787 state_mem.go:107] "Deleted CPUSet assignment" podUID="2e3d75aa-1156-48f5-9435-b8b3cd2ec555" containerName="mariadb-account-create-update" Jan 27 08:10:03 crc kubenswrapper[4787]: E0127 08:10:03.777941 4787 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c3150913-c395-4ed0-a5d3-616426f58671" containerName="mariadb-account-create-update" Jan 27 08:10:03 crc kubenswrapper[4787]: I0127 08:10:03.777950 4787 state_mem.go:107] "Deleted CPUSet assignment" podUID="c3150913-c395-4ed0-a5d3-616426f58671" containerName="mariadb-account-create-update" Jan 27 08:10:03 crc kubenswrapper[4787]: E0127 08:10:03.777974 4787 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="205e2c3c-82ef-4aaa-b1b3-0465a855bde9" containerName="mariadb-database-create" Jan 27 08:10:03 crc kubenswrapper[4787]: I0127 08:10:03.777981 4787 state_mem.go:107] "Deleted CPUSet assignment" podUID="205e2c3c-82ef-4aaa-b1b3-0465a855bde9" containerName="mariadb-database-create" Jan 27 08:10:03 crc kubenswrapper[4787]: E0127 08:10:03.777998 4787 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="646de171-9e06-4df6-9e1e-07319c04b95c" containerName="mariadb-account-create-update" Jan 27 08:10:03 crc kubenswrapper[4787]: I0127 08:10:03.778005 4787 state_mem.go:107] "Deleted CPUSet assignment" podUID="646de171-9e06-4df6-9e1e-07319c04b95c" containerName="mariadb-account-create-update" Jan 27 08:10:03 crc kubenswrapper[4787]: E0127 08:10:03.778018 4787 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7ba9e1b4-f385-47f9-80f6-508a3406aa2a" containerName="mariadb-database-create" Jan 27 08:10:03 crc kubenswrapper[4787]: I0127 08:10:03.778025 4787 state_mem.go:107] "Deleted CPUSet assignment" podUID="7ba9e1b4-f385-47f9-80f6-508a3406aa2a" containerName="mariadb-database-create" Jan 27 08:10:03 crc kubenswrapper[4787]: I0127 08:10:03.778224 4787 memory_manager.go:354] "RemoveStaleState removing state" podUID="205e2c3c-82ef-4aaa-b1b3-0465a855bde9" containerName="mariadb-database-create" Jan 27 08:10:03 crc kubenswrapper[4787]: I0127 08:10:03.778243 4787 memory_manager.go:354] "RemoveStaleState removing state" podUID="c3150913-c395-4ed0-a5d3-616426f58671" containerName="mariadb-account-create-update" Jan 27 08:10:03 crc kubenswrapper[4787]: I0127 08:10:03.778256 4787 memory_manager.go:354] "RemoveStaleState removing state" podUID="646de171-9e06-4df6-9e1e-07319c04b95c" containerName="mariadb-account-create-update" Jan 27 08:10:03 crc kubenswrapper[4787]: I0127 08:10:03.778272 4787 memory_manager.go:354] "RemoveStaleState removing state" podUID="7ba9e1b4-f385-47f9-80f6-508a3406aa2a" containerName="mariadb-database-create" Jan 27 08:10:03 crc kubenswrapper[4787]: I0127 08:10:03.778284 4787 memory_manager.go:354] "RemoveStaleState removing state" podUID="2e3d75aa-1156-48f5-9435-b8b3cd2ec555" containerName="mariadb-account-create-update" Jan 27 08:10:03 crc kubenswrapper[4787]: I0127 08:10:03.779156 4787 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="nova-kuttl-default/keystone-db-sync-ztn8q" Jan 27 08:10:03 crc kubenswrapper[4787]: I0127 08:10:03.782367 4787 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"keystone-scripts" Jan 27 08:10:03 crc kubenswrapper[4787]: I0127 08:10:03.782652 4787 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"keystone-config-data" Jan 27 08:10:03 crc kubenswrapper[4787]: I0127 08:10:03.782852 4787 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"keystone-keystone-dockercfg-z2dpt" Jan 27 08:10:03 crc kubenswrapper[4787]: I0127 08:10:03.782905 4787 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"keystone" Jan 27 08:10:03 crc kubenswrapper[4787]: I0127 08:10:03.785517 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/keystone-db-sync-ztn8q"] Jan 27 08:10:03 crc kubenswrapper[4787]: I0127 08:10:03.875648 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5nn7r\" (UniqueName: \"kubernetes.io/projected/c5da7883-eba0-4fa4-92c5-9085b0ce0090-kube-api-access-5nn7r\") pod \"keystone-db-sync-ztn8q\" (UID: \"c5da7883-eba0-4fa4-92c5-9085b0ce0090\") " pod="nova-kuttl-default/keystone-db-sync-ztn8q" Jan 27 08:10:03 crc kubenswrapper[4787]: I0127 08:10:03.875783 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c5da7883-eba0-4fa4-92c5-9085b0ce0090-config-data\") pod \"keystone-db-sync-ztn8q\" (UID: \"c5da7883-eba0-4fa4-92c5-9085b0ce0090\") " pod="nova-kuttl-default/keystone-db-sync-ztn8q" Jan 27 08:10:03 crc kubenswrapper[4787]: I0127 08:10:03.875875 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c5da7883-eba0-4fa4-92c5-9085b0ce0090-combined-ca-bundle\") pod \"keystone-db-sync-ztn8q\" (UID: \"c5da7883-eba0-4fa4-92c5-9085b0ce0090\") " pod="nova-kuttl-default/keystone-db-sync-ztn8q" Jan 27 08:10:03 crc kubenswrapper[4787]: I0127 08:10:03.977709 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c5da7883-eba0-4fa4-92c5-9085b0ce0090-config-data\") pod \"keystone-db-sync-ztn8q\" (UID: \"c5da7883-eba0-4fa4-92c5-9085b0ce0090\") " pod="nova-kuttl-default/keystone-db-sync-ztn8q" Jan 27 08:10:03 crc kubenswrapper[4787]: I0127 08:10:03.977815 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c5da7883-eba0-4fa4-92c5-9085b0ce0090-combined-ca-bundle\") pod \"keystone-db-sync-ztn8q\" (UID: \"c5da7883-eba0-4fa4-92c5-9085b0ce0090\") " pod="nova-kuttl-default/keystone-db-sync-ztn8q" Jan 27 08:10:03 crc kubenswrapper[4787]: I0127 08:10:03.977878 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5nn7r\" (UniqueName: \"kubernetes.io/projected/c5da7883-eba0-4fa4-92c5-9085b0ce0090-kube-api-access-5nn7r\") pod \"keystone-db-sync-ztn8q\" (UID: \"c5da7883-eba0-4fa4-92c5-9085b0ce0090\") " pod="nova-kuttl-default/keystone-db-sync-ztn8q" Jan 27 08:10:03 crc kubenswrapper[4787]: I0127 08:10:03.986212 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/c5da7883-eba0-4fa4-92c5-9085b0ce0090-combined-ca-bundle\") pod \"keystone-db-sync-ztn8q\" (UID: \"c5da7883-eba0-4fa4-92c5-9085b0ce0090\") " pod="nova-kuttl-default/keystone-db-sync-ztn8q" Jan 27 08:10:03 crc kubenswrapper[4787]: I0127 08:10:03.986262 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c5da7883-eba0-4fa4-92c5-9085b0ce0090-config-data\") pod \"keystone-db-sync-ztn8q\" (UID: \"c5da7883-eba0-4fa4-92c5-9085b0ce0090\") " pod="nova-kuttl-default/keystone-db-sync-ztn8q" Jan 27 08:10:04 crc kubenswrapper[4787]: I0127 08:10:04.001819 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5nn7r\" (UniqueName: \"kubernetes.io/projected/c5da7883-eba0-4fa4-92c5-9085b0ce0090-kube-api-access-5nn7r\") pod \"keystone-db-sync-ztn8q\" (UID: \"c5da7883-eba0-4fa4-92c5-9085b0ce0090\") " pod="nova-kuttl-default/keystone-db-sync-ztn8q" Jan 27 08:10:04 crc kubenswrapper[4787]: I0127 08:10:04.038144 4787 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="nova-kuttl-default/rabbitmq-notifications-server-0" Jan 27 08:10:04 crc kubenswrapper[4787]: I0127 08:10:04.101192 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/keystone-db-sync-ztn8q" Jan 27 08:10:04 crc kubenswrapper[4787]: I0127 08:10:04.611519 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/keystone-db-sync-ztn8q"] Jan 27 08:10:05 crc kubenswrapper[4787]: I0127 08:10:05.495995 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/keystone-db-sync-ztn8q" event={"ID":"c5da7883-eba0-4fa4-92c5-9085b0ce0090","Type":"ContainerStarted","Data":"577257a6a0504fab18c8af07f7f4643b2ff43f7abb2b83cd8e0f2644b25042a5"} Jan 27 08:10:12 crc kubenswrapper[4787]: I0127 08:10:12.608679 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/keystone-db-sync-ztn8q" event={"ID":"c5da7883-eba0-4fa4-92c5-9085b0ce0090","Type":"ContainerStarted","Data":"fdbb6282b854859c8ab712b9eae897a43f0552adf90b843b092e2da77b58ae00"} Jan 27 08:10:15 crc kubenswrapper[4787]: I0127 08:10:15.636440 4787 generic.go:334] "Generic (PLEG): container finished" podID="c5da7883-eba0-4fa4-92c5-9085b0ce0090" containerID="fdbb6282b854859c8ab712b9eae897a43f0552adf90b843b092e2da77b58ae00" exitCode=0 Jan 27 08:10:15 crc kubenswrapper[4787]: I0127 08:10:15.636533 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/keystone-db-sync-ztn8q" event={"ID":"c5da7883-eba0-4fa4-92c5-9085b0ce0090","Type":"ContainerDied","Data":"fdbb6282b854859c8ab712b9eae897a43f0552adf90b843b092e2da77b58ae00"} Jan 27 08:10:17 crc kubenswrapper[4787]: I0127 08:10:17.026044 4787 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="nova-kuttl-default/keystone-db-sync-ztn8q" Jan 27 08:10:17 crc kubenswrapper[4787]: I0127 08:10:17.133064 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5nn7r\" (UniqueName: \"kubernetes.io/projected/c5da7883-eba0-4fa4-92c5-9085b0ce0090-kube-api-access-5nn7r\") pod \"c5da7883-eba0-4fa4-92c5-9085b0ce0090\" (UID: \"c5da7883-eba0-4fa4-92c5-9085b0ce0090\") " Jan 27 08:10:17 crc kubenswrapper[4787]: I0127 08:10:17.133157 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c5da7883-eba0-4fa4-92c5-9085b0ce0090-config-data\") pod \"c5da7883-eba0-4fa4-92c5-9085b0ce0090\" (UID: \"c5da7883-eba0-4fa4-92c5-9085b0ce0090\") " Jan 27 08:10:17 crc kubenswrapper[4787]: I0127 08:10:17.133409 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c5da7883-eba0-4fa4-92c5-9085b0ce0090-combined-ca-bundle\") pod \"c5da7883-eba0-4fa4-92c5-9085b0ce0090\" (UID: \"c5da7883-eba0-4fa4-92c5-9085b0ce0090\") " Jan 27 08:10:17 crc kubenswrapper[4787]: I0127 08:10:17.140645 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c5da7883-eba0-4fa4-92c5-9085b0ce0090-kube-api-access-5nn7r" (OuterVolumeSpecName: "kube-api-access-5nn7r") pod "c5da7883-eba0-4fa4-92c5-9085b0ce0090" (UID: "c5da7883-eba0-4fa4-92c5-9085b0ce0090"). InnerVolumeSpecName "kube-api-access-5nn7r". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 27 08:10:17 crc kubenswrapper[4787]: I0127 08:10:17.157285 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c5da7883-eba0-4fa4-92c5-9085b0ce0090-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "c5da7883-eba0-4fa4-92c5-9085b0ce0090" (UID: "c5da7883-eba0-4fa4-92c5-9085b0ce0090"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 27 08:10:17 crc kubenswrapper[4787]: I0127 08:10:17.179940 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c5da7883-eba0-4fa4-92c5-9085b0ce0090-config-data" (OuterVolumeSpecName: "config-data") pod "c5da7883-eba0-4fa4-92c5-9085b0ce0090" (UID: "c5da7883-eba0-4fa4-92c5-9085b0ce0090"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 27 08:10:17 crc kubenswrapper[4787]: I0127 08:10:17.235649 4787 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c5da7883-eba0-4fa4-92c5-9085b0ce0090-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 27 08:10:17 crc kubenswrapper[4787]: I0127 08:10:17.235694 4787 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5nn7r\" (UniqueName: \"kubernetes.io/projected/c5da7883-eba0-4fa4-92c5-9085b0ce0090-kube-api-access-5nn7r\") on node \"crc\" DevicePath \"\"" Jan 27 08:10:17 crc kubenswrapper[4787]: I0127 08:10:17.235707 4787 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c5da7883-eba0-4fa4-92c5-9085b0ce0090-config-data\") on node \"crc\" DevicePath \"\"" Jan 27 08:10:17 crc kubenswrapper[4787]: I0127 08:10:17.663363 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/keystone-db-sync-ztn8q" event={"ID":"c5da7883-eba0-4fa4-92c5-9085b0ce0090","Type":"ContainerDied","Data":"577257a6a0504fab18c8af07f7f4643b2ff43f7abb2b83cd8e0f2644b25042a5"} Jan 27 08:10:17 crc kubenswrapper[4787]: I0127 08:10:17.663423 4787 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/keystone-db-sync-ztn8q" Jan 27 08:10:17 crc kubenswrapper[4787]: I0127 08:10:17.663446 4787 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="577257a6a0504fab18c8af07f7f4643b2ff43f7abb2b83cd8e0f2644b25042a5" Jan 27 08:10:17 crc kubenswrapper[4787]: I0127 08:10:17.891249 4787 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["nova-kuttl-default/keystone-bootstrap-gzqt8"] Jan 27 08:10:17 crc kubenswrapper[4787]: E0127 08:10:17.891743 4787 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c5da7883-eba0-4fa4-92c5-9085b0ce0090" containerName="keystone-db-sync" Jan 27 08:10:17 crc kubenswrapper[4787]: I0127 08:10:17.891768 4787 state_mem.go:107] "Deleted CPUSet assignment" podUID="c5da7883-eba0-4fa4-92c5-9085b0ce0090" containerName="keystone-db-sync" Jan 27 08:10:17 crc kubenswrapper[4787]: I0127 08:10:17.891997 4787 memory_manager.go:354] "RemoveStaleState removing state" podUID="c5da7883-eba0-4fa4-92c5-9085b0ce0090" containerName="keystone-db-sync" Jan 27 08:10:17 crc kubenswrapper[4787]: I0127 08:10:17.892838 4787 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="nova-kuttl-default/keystone-bootstrap-gzqt8" Jan 27 08:10:17 crc kubenswrapper[4787]: I0127 08:10:17.904313 4787 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"keystone" Jan 27 08:10:17 crc kubenswrapper[4787]: I0127 08:10:17.904591 4787 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"keystone-config-data" Jan 27 08:10:17 crc kubenswrapper[4787]: I0127 08:10:17.904757 4787 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"keystone-keystone-dockercfg-z2dpt" Jan 27 08:10:17 crc kubenswrapper[4787]: I0127 08:10:17.904792 4787 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"osp-secret" Jan 27 08:10:17 crc kubenswrapper[4787]: I0127 08:10:17.904893 4787 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"keystone-scripts" Jan 27 08:10:17 crc kubenswrapper[4787]: I0127 08:10:17.910288 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/keystone-bootstrap-gzqt8"] Jan 27 08:10:17 crc kubenswrapper[4787]: I0127 08:10:17.946928 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/25481a95-2112-4569-8db7-38a3bc2f2137-fernet-keys\") pod \"keystone-bootstrap-gzqt8\" (UID: \"25481a95-2112-4569-8db7-38a3bc2f2137\") " pod="nova-kuttl-default/keystone-bootstrap-gzqt8" Jan 27 08:10:17 crc kubenswrapper[4787]: I0127 08:10:17.946979 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/25481a95-2112-4569-8db7-38a3bc2f2137-credential-keys\") pod \"keystone-bootstrap-gzqt8\" (UID: \"25481a95-2112-4569-8db7-38a3bc2f2137\") " pod="nova-kuttl-default/keystone-bootstrap-gzqt8" Jan 27 08:10:17 crc kubenswrapper[4787]: I0127 08:10:17.946997 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/25481a95-2112-4569-8db7-38a3bc2f2137-combined-ca-bundle\") pod \"keystone-bootstrap-gzqt8\" (UID: \"25481a95-2112-4569-8db7-38a3bc2f2137\") " pod="nova-kuttl-default/keystone-bootstrap-gzqt8" Jan 27 08:10:17 crc kubenswrapper[4787]: I0127 08:10:17.947038 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bn7jh\" (UniqueName: \"kubernetes.io/projected/25481a95-2112-4569-8db7-38a3bc2f2137-kube-api-access-bn7jh\") pod \"keystone-bootstrap-gzqt8\" (UID: \"25481a95-2112-4569-8db7-38a3bc2f2137\") " pod="nova-kuttl-default/keystone-bootstrap-gzqt8" Jan 27 08:10:17 crc kubenswrapper[4787]: I0127 08:10:17.947293 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/25481a95-2112-4569-8db7-38a3bc2f2137-config-data\") pod \"keystone-bootstrap-gzqt8\" (UID: \"25481a95-2112-4569-8db7-38a3bc2f2137\") " pod="nova-kuttl-default/keystone-bootstrap-gzqt8" Jan 27 08:10:17 crc kubenswrapper[4787]: I0127 08:10:17.947452 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/25481a95-2112-4569-8db7-38a3bc2f2137-scripts\") pod \"keystone-bootstrap-gzqt8\" (UID: \"25481a95-2112-4569-8db7-38a3bc2f2137\") " pod="nova-kuttl-default/keystone-bootstrap-gzqt8" Jan 27 08:10:18 
crc kubenswrapper[4787]: I0127 08:10:18.049383 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/25481a95-2112-4569-8db7-38a3bc2f2137-config-data\") pod \"keystone-bootstrap-gzqt8\" (UID: \"25481a95-2112-4569-8db7-38a3bc2f2137\") " pod="nova-kuttl-default/keystone-bootstrap-gzqt8" Jan 27 08:10:18 crc kubenswrapper[4787]: I0127 08:10:18.049472 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/25481a95-2112-4569-8db7-38a3bc2f2137-scripts\") pod \"keystone-bootstrap-gzqt8\" (UID: \"25481a95-2112-4569-8db7-38a3bc2f2137\") " pod="nova-kuttl-default/keystone-bootstrap-gzqt8" Jan 27 08:10:18 crc kubenswrapper[4787]: I0127 08:10:18.049784 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/25481a95-2112-4569-8db7-38a3bc2f2137-fernet-keys\") pod \"keystone-bootstrap-gzqt8\" (UID: \"25481a95-2112-4569-8db7-38a3bc2f2137\") " pod="nova-kuttl-default/keystone-bootstrap-gzqt8" Jan 27 08:10:18 crc kubenswrapper[4787]: I0127 08:10:18.049806 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/25481a95-2112-4569-8db7-38a3bc2f2137-credential-keys\") pod \"keystone-bootstrap-gzqt8\" (UID: \"25481a95-2112-4569-8db7-38a3bc2f2137\") " pod="nova-kuttl-default/keystone-bootstrap-gzqt8" Jan 27 08:10:18 crc kubenswrapper[4787]: I0127 08:10:18.049823 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/25481a95-2112-4569-8db7-38a3bc2f2137-combined-ca-bundle\") pod \"keystone-bootstrap-gzqt8\" (UID: \"25481a95-2112-4569-8db7-38a3bc2f2137\") " pod="nova-kuttl-default/keystone-bootstrap-gzqt8" Jan 27 08:10:18 crc kubenswrapper[4787]: I0127 08:10:18.049876 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bn7jh\" (UniqueName: \"kubernetes.io/projected/25481a95-2112-4569-8db7-38a3bc2f2137-kube-api-access-bn7jh\") pod \"keystone-bootstrap-gzqt8\" (UID: \"25481a95-2112-4569-8db7-38a3bc2f2137\") " pod="nova-kuttl-default/keystone-bootstrap-gzqt8" Jan 27 08:10:18 crc kubenswrapper[4787]: I0127 08:10:18.055543 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/25481a95-2112-4569-8db7-38a3bc2f2137-combined-ca-bundle\") pod \"keystone-bootstrap-gzqt8\" (UID: \"25481a95-2112-4569-8db7-38a3bc2f2137\") " pod="nova-kuttl-default/keystone-bootstrap-gzqt8" Jan 27 08:10:18 crc kubenswrapper[4787]: I0127 08:10:18.056876 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/25481a95-2112-4569-8db7-38a3bc2f2137-config-data\") pod \"keystone-bootstrap-gzqt8\" (UID: \"25481a95-2112-4569-8db7-38a3bc2f2137\") " pod="nova-kuttl-default/keystone-bootstrap-gzqt8" Jan 27 08:10:18 crc kubenswrapper[4787]: I0127 08:10:18.062175 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/25481a95-2112-4569-8db7-38a3bc2f2137-scripts\") pod \"keystone-bootstrap-gzqt8\" (UID: \"25481a95-2112-4569-8db7-38a3bc2f2137\") " pod="nova-kuttl-default/keystone-bootstrap-gzqt8" Jan 27 08:10:18 crc kubenswrapper[4787]: I0127 08:10:18.079330 4787 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/25481a95-2112-4569-8db7-38a3bc2f2137-credential-keys\") pod \"keystone-bootstrap-gzqt8\" (UID: \"25481a95-2112-4569-8db7-38a3bc2f2137\") " pod="nova-kuttl-default/keystone-bootstrap-gzqt8" Jan 27 08:10:18 crc kubenswrapper[4787]: I0127 08:10:18.089423 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/25481a95-2112-4569-8db7-38a3bc2f2137-fernet-keys\") pod \"keystone-bootstrap-gzqt8\" (UID: \"25481a95-2112-4569-8db7-38a3bc2f2137\") " pod="nova-kuttl-default/keystone-bootstrap-gzqt8" Jan 27 08:10:18 crc kubenswrapper[4787]: I0127 08:10:18.102304 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bn7jh\" (UniqueName: \"kubernetes.io/projected/25481a95-2112-4569-8db7-38a3bc2f2137-kube-api-access-bn7jh\") pod \"keystone-bootstrap-gzqt8\" (UID: \"25481a95-2112-4569-8db7-38a3bc2f2137\") " pod="nova-kuttl-default/keystone-bootstrap-gzqt8" Jan 27 08:10:18 crc kubenswrapper[4787]: I0127 08:10:18.149595 4787 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["nova-kuttl-default/placement-db-sync-7x42z"] Jan 27 08:10:18 crc kubenswrapper[4787]: I0127 08:10:18.151074 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/placement-db-sync-7x42z" Jan 27 08:10:18 crc kubenswrapper[4787]: I0127 08:10:18.170829 4787 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"placement-config-data" Jan 27 08:10:18 crc kubenswrapper[4787]: I0127 08:10:18.171289 4787 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"placement-scripts" Jan 27 08:10:18 crc kubenswrapper[4787]: I0127 08:10:18.182162 4787 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"placement-placement-dockercfg-7wn8v" Jan 27 08:10:18 crc kubenswrapper[4787]: I0127 08:10:18.185644 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/placement-db-sync-7x42z"] Jan 27 08:10:18 crc kubenswrapper[4787]: I0127 08:10:18.222020 4787 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="nova-kuttl-default/keystone-bootstrap-gzqt8" Jan 27 08:10:18 crc kubenswrapper[4787]: I0127 08:10:18.254767 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/54e9c344-d7c0-4bcc-97f5-2ad0b8ee86d8-logs\") pod \"placement-db-sync-7x42z\" (UID: \"54e9c344-d7c0-4bcc-97f5-2ad0b8ee86d8\") " pod="nova-kuttl-default/placement-db-sync-7x42z" Jan 27 08:10:18 crc kubenswrapper[4787]: I0127 08:10:18.255206 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/54e9c344-d7c0-4bcc-97f5-2ad0b8ee86d8-combined-ca-bundle\") pod \"placement-db-sync-7x42z\" (UID: \"54e9c344-d7c0-4bcc-97f5-2ad0b8ee86d8\") " pod="nova-kuttl-default/placement-db-sync-7x42z" Jan 27 08:10:18 crc kubenswrapper[4787]: I0127 08:10:18.255313 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/54e9c344-d7c0-4bcc-97f5-2ad0b8ee86d8-config-data\") pod \"placement-db-sync-7x42z\" (UID: \"54e9c344-d7c0-4bcc-97f5-2ad0b8ee86d8\") " pod="nova-kuttl-default/placement-db-sync-7x42z" Jan 27 08:10:18 crc kubenswrapper[4787]: I0127 08:10:18.255433 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5pfpx\" (UniqueName: \"kubernetes.io/projected/54e9c344-d7c0-4bcc-97f5-2ad0b8ee86d8-kube-api-access-5pfpx\") pod \"placement-db-sync-7x42z\" (UID: \"54e9c344-d7c0-4bcc-97f5-2ad0b8ee86d8\") " pod="nova-kuttl-default/placement-db-sync-7x42z" Jan 27 08:10:18 crc kubenswrapper[4787]: I0127 08:10:18.255515 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/54e9c344-d7c0-4bcc-97f5-2ad0b8ee86d8-scripts\") pod \"placement-db-sync-7x42z\" (UID: \"54e9c344-d7c0-4bcc-97f5-2ad0b8ee86d8\") " pod="nova-kuttl-default/placement-db-sync-7x42z" Jan 27 08:10:18 crc kubenswrapper[4787]: I0127 08:10:18.357803 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/54e9c344-d7c0-4bcc-97f5-2ad0b8ee86d8-logs\") pod \"placement-db-sync-7x42z\" (UID: \"54e9c344-d7c0-4bcc-97f5-2ad0b8ee86d8\") " pod="nova-kuttl-default/placement-db-sync-7x42z" Jan 27 08:10:18 crc kubenswrapper[4787]: I0127 08:10:18.358311 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/54e9c344-d7c0-4bcc-97f5-2ad0b8ee86d8-combined-ca-bundle\") pod \"placement-db-sync-7x42z\" (UID: \"54e9c344-d7c0-4bcc-97f5-2ad0b8ee86d8\") " pod="nova-kuttl-default/placement-db-sync-7x42z" Jan 27 08:10:18 crc kubenswrapper[4787]: I0127 08:10:18.358346 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/54e9c344-d7c0-4bcc-97f5-2ad0b8ee86d8-config-data\") pod \"placement-db-sync-7x42z\" (UID: \"54e9c344-d7c0-4bcc-97f5-2ad0b8ee86d8\") " pod="nova-kuttl-default/placement-db-sync-7x42z" Jan 27 08:10:18 crc kubenswrapper[4787]: I0127 08:10:18.358403 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5pfpx\" (UniqueName: \"kubernetes.io/projected/54e9c344-d7c0-4bcc-97f5-2ad0b8ee86d8-kube-api-access-5pfpx\") pod \"placement-db-sync-7x42z\" (UID: 
\"54e9c344-d7c0-4bcc-97f5-2ad0b8ee86d8\") " pod="nova-kuttl-default/placement-db-sync-7x42z" Jan 27 08:10:18 crc kubenswrapper[4787]: I0127 08:10:18.358426 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/54e9c344-d7c0-4bcc-97f5-2ad0b8ee86d8-scripts\") pod \"placement-db-sync-7x42z\" (UID: \"54e9c344-d7c0-4bcc-97f5-2ad0b8ee86d8\") " pod="nova-kuttl-default/placement-db-sync-7x42z" Jan 27 08:10:18 crc kubenswrapper[4787]: I0127 08:10:18.363304 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/54e9c344-d7c0-4bcc-97f5-2ad0b8ee86d8-scripts\") pod \"placement-db-sync-7x42z\" (UID: \"54e9c344-d7c0-4bcc-97f5-2ad0b8ee86d8\") " pod="nova-kuttl-default/placement-db-sync-7x42z" Jan 27 08:10:18 crc kubenswrapper[4787]: I0127 08:10:18.363331 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/54e9c344-d7c0-4bcc-97f5-2ad0b8ee86d8-logs\") pod \"placement-db-sync-7x42z\" (UID: \"54e9c344-d7c0-4bcc-97f5-2ad0b8ee86d8\") " pod="nova-kuttl-default/placement-db-sync-7x42z" Jan 27 08:10:18 crc kubenswrapper[4787]: I0127 08:10:18.366283 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/54e9c344-d7c0-4bcc-97f5-2ad0b8ee86d8-combined-ca-bundle\") pod \"placement-db-sync-7x42z\" (UID: \"54e9c344-d7c0-4bcc-97f5-2ad0b8ee86d8\") " pod="nova-kuttl-default/placement-db-sync-7x42z" Jan 27 08:10:18 crc kubenswrapper[4787]: I0127 08:10:18.367389 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/54e9c344-d7c0-4bcc-97f5-2ad0b8ee86d8-config-data\") pod \"placement-db-sync-7x42z\" (UID: \"54e9c344-d7c0-4bcc-97f5-2ad0b8ee86d8\") " pod="nova-kuttl-default/placement-db-sync-7x42z" Jan 27 08:10:18 crc kubenswrapper[4787]: I0127 08:10:18.395386 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5pfpx\" (UniqueName: \"kubernetes.io/projected/54e9c344-d7c0-4bcc-97f5-2ad0b8ee86d8-kube-api-access-5pfpx\") pod \"placement-db-sync-7x42z\" (UID: \"54e9c344-d7c0-4bcc-97f5-2ad0b8ee86d8\") " pod="nova-kuttl-default/placement-db-sync-7x42z" Jan 27 08:10:18 crc kubenswrapper[4787]: I0127 08:10:18.494594 4787 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="nova-kuttl-default/placement-db-sync-7x42z" Jan 27 08:10:18 crc kubenswrapper[4787]: I0127 08:10:18.799937 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/keystone-bootstrap-gzqt8"] Jan 27 08:10:19 crc kubenswrapper[4787]: I0127 08:10:19.004326 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/placement-db-sync-7x42z"] Jan 27 08:10:19 crc kubenswrapper[4787]: I0127 08:10:19.691674 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/placement-db-sync-7x42z" event={"ID":"54e9c344-d7c0-4bcc-97f5-2ad0b8ee86d8","Type":"ContainerStarted","Data":"872a1d52e4e7762f61f87cf9f200fc0c3aae697c3f41a540eb1bc95d926012b6"} Jan 27 08:10:19 crc kubenswrapper[4787]: I0127 08:10:19.699229 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/keystone-bootstrap-gzqt8" event={"ID":"25481a95-2112-4569-8db7-38a3bc2f2137","Type":"ContainerStarted","Data":"50c79c9d4510d9aad33c8c6b7d7b07dfbec318be7cf6d78cc13f3f9ebe9f87c3"} Jan 27 08:10:19 crc kubenswrapper[4787]: I0127 08:10:19.699276 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/keystone-bootstrap-gzqt8" event={"ID":"25481a95-2112-4569-8db7-38a3bc2f2137","Type":"ContainerStarted","Data":"513f29482f207a5eac5aadf34dfc86089d3ef24e51546c32e97b5dc398026284"} Jan 27 08:10:22 crc kubenswrapper[4787]: I0127 08:10:22.736187 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/placement-db-sync-7x42z" event={"ID":"54e9c344-d7c0-4bcc-97f5-2ad0b8ee86d8","Type":"ContainerStarted","Data":"144343248d2b16217cf2dcb9392f4fccabd0a94e089b16a6bacae928d2fbb414"} Jan 27 08:10:22 crc kubenswrapper[4787]: I0127 08:10:22.738125 4787 generic.go:334] "Generic (PLEG): container finished" podID="25481a95-2112-4569-8db7-38a3bc2f2137" containerID="50c79c9d4510d9aad33c8c6b7d7b07dfbec318be7cf6d78cc13f3f9ebe9f87c3" exitCode=0 Jan 27 08:10:22 crc kubenswrapper[4787]: I0127 08:10:22.738170 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/keystone-bootstrap-gzqt8" event={"ID":"25481a95-2112-4569-8db7-38a3bc2f2137","Type":"ContainerDied","Data":"50c79c9d4510d9aad33c8c6b7d7b07dfbec318be7cf6d78cc13f3f9ebe9f87c3"} Jan 27 08:10:22 crc kubenswrapper[4787]: I0127 08:10:22.758013 4787 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="nova-kuttl-default/placement-db-sync-7x42z" podStartSLOduration=1.541027396 podStartE2EDuration="4.757989944s" podCreationTimestamp="2026-01-27 08:10:18 +0000 UTC" firstStartedPulling="2026-01-27 08:10:19.008190455 +0000 UTC m=+1124.660545947" lastFinishedPulling="2026-01-27 08:10:22.225153003 +0000 UTC m=+1127.877508495" observedRunningTime="2026-01-27 08:10:22.757291657 +0000 UTC m=+1128.409647179" watchObservedRunningTime="2026-01-27 08:10:22.757989944 +0000 UTC m=+1128.410345436" Jan 27 08:10:22 crc kubenswrapper[4787]: I0127 08:10:22.758830 4787 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="nova-kuttl-default/keystone-bootstrap-gzqt8" podStartSLOduration=5.758825375 podStartE2EDuration="5.758825375s" podCreationTimestamp="2026-01-27 08:10:17 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-27 08:10:19.725226829 +0000 UTC m=+1125.377582321" watchObservedRunningTime="2026-01-27 08:10:22.758825375 +0000 UTC m=+1128.411180877" Jan 27 08:10:24 crc kubenswrapper[4787]: I0127 08:10:24.108770 4787 util.go:48] "No 
ready sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/keystone-bootstrap-gzqt8" Jan 27 08:10:24 crc kubenswrapper[4787]: I0127 08:10:24.187530 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/25481a95-2112-4569-8db7-38a3bc2f2137-config-data\") pod \"25481a95-2112-4569-8db7-38a3bc2f2137\" (UID: \"25481a95-2112-4569-8db7-38a3bc2f2137\") " Jan 27 08:10:24 crc kubenswrapper[4787]: I0127 08:10:24.187692 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/25481a95-2112-4569-8db7-38a3bc2f2137-combined-ca-bundle\") pod \"25481a95-2112-4569-8db7-38a3bc2f2137\" (UID: \"25481a95-2112-4569-8db7-38a3bc2f2137\") " Jan 27 08:10:24 crc kubenswrapper[4787]: I0127 08:10:24.187790 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/25481a95-2112-4569-8db7-38a3bc2f2137-fernet-keys\") pod \"25481a95-2112-4569-8db7-38a3bc2f2137\" (UID: \"25481a95-2112-4569-8db7-38a3bc2f2137\") " Jan 27 08:10:24 crc kubenswrapper[4787]: I0127 08:10:24.187927 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/25481a95-2112-4569-8db7-38a3bc2f2137-scripts\") pod \"25481a95-2112-4569-8db7-38a3bc2f2137\" (UID: \"25481a95-2112-4569-8db7-38a3bc2f2137\") " Jan 27 08:10:24 crc kubenswrapper[4787]: I0127 08:10:24.187969 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/25481a95-2112-4569-8db7-38a3bc2f2137-credential-keys\") pod \"25481a95-2112-4569-8db7-38a3bc2f2137\" (UID: \"25481a95-2112-4569-8db7-38a3bc2f2137\") " Jan 27 08:10:24 crc kubenswrapper[4787]: I0127 08:10:24.188531 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bn7jh\" (UniqueName: \"kubernetes.io/projected/25481a95-2112-4569-8db7-38a3bc2f2137-kube-api-access-bn7jh\") pod \"25481a95-2112-4569-8db7-38a3bc2f2137\" (UID: \"25481a95-2112-4569-8db7-38a3bc2f2137\") " Jan 27 08:10:24 crc kubenswrapper[4787]: I0127 08:10:24.193919 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/25481a95-2112-4569-8db7-38a3bc2f2137-scripts" (OuterVolumeSpecName: "scripts") pod "25481a95-2112-4569-8db7-38a3bc2f2137" (UID: "25481a95-2112-4569-8db7-38a3bc2f2137"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 27 08:10:24 crc kubenswrapper[4787]: I0127 08:10:24.193940 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/25481a95-2112-4569-8db7-38a3bc2f2137-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "25481a95-2112-4569-8db7-38a3bc2f2137" (UID: "25481a95-2112-4569-8db7-38a3bc2f2137"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 27 08:10:24 crc kubenswrapper[4787]: I0127 08:10:24.194377 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/25481a95-2112-4569-8db7-38a3bc2f2137-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "25481a95-2112-4569-8db7-38a3bc2f2137" (UID: "25481a95-2112-4569-8db7-38a3bc2f2137"). InnerVolumeSpecName "fernet-keys". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 27 08:10:24 crc kubenswrapper[4787]: I0127 08:10:24.194632 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/25481a95-2112-4569-8db7-38a3bc2f2137-kube-api-access-bn7jh" (OuterVolumeSpecName: "kube-api-access-bn7jh") pod "25481a95-2112-4569-8db7-38a3bc2f2137" (UID: "25481a95-2112-4569-8db7-38a3bc2f2137"). InnerVolumeSpecName "kube-api-access-bn7jh". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 27 08:10:24 crc kubenswrapper[4787]: I0127 08:10:24.210221 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/25481a95-2112-4569-8db7-38a3bc2f2137-config-data" (OuterVolumeSpecName: "config-data") pod "25481a95-2112-4569-8db7-38a3bc2f2137" (UID: "25481a95-2112-4569-8db7-38a3bc2f2137"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 27 08:10:24 crc kubenswrapper[4787]: I0127 08:10:24.213797 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/25481a95-2112-4569-8db7-38a3bc2f2137-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "25481a95-2112-4569-8db7-38a3bc2f2137" (UID: "25481a95-2112-4569-8db7-38a3bc2f2137"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 27 08:10:24 crc kubenswrapper[4787]: I0127 08:10:24.290926 4787 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/25481a95-2112-4569-8db7-38a3bc2f2137-scripts\") on node \"crc\" DevicePath \"\"" Jan 27 08:10:24 crc kubenswrapper[4787]: I0127 08:10:24.290973 4787 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/25481a95-2112-4569-8db7-38a3bc2f2137-credential-keys\") on node \"crc\" DevicePath \"\"" Jan 27 08:10:24 crc kubenswrapper[4787]: I0127 08:10:24.290988 4787 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bn7jh\" (UniqueName: \"kubernetes.io/projected/25481a95-2112-4569-8db7-38a3bc2f2137-kube-api-access-bn7jh\") on node \"crc\" DevicePath \"\"" Jan 27 08:10:24 crc kubenswrapper[4787]: I0127 08:10:24.291003 4787 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/25481a95-2112-4569-8db7-38a3bc2f2137-config-data\") on node \"crc\" DevicePath \"\"" Jan 27 08:10:24 crc kubenswrapper[4787]: I0127 08:10:24.291018 4787 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/25481a95-2112-4569-8db7-38a3bc2f2137-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 27 08:10:24 crc kubenswrapper[4787]: I0127 08:10:24.291031 4787 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/25481a95-2112-4569-8db7-38a3bc2f2137-fernet-keys\") on node \"crc\" DevicePath \"\"" Jan 27 08:10:24 crc kubenswrapper[4787]: I0127 08:10:24.760839 4787 generic.go:334] "Generic (PLEG): container finished" podID="54e9c344-d7c0-4bcc-97f5-2ad0b8ee86d8" containerID="144343248d2b16217cf2dcb9392f4fccabd0a94e089b16a6bacae928d2fbb414" exitCode=0 Jan 27 08:10:24 crc kubenswrapper[4787]: I0127 08:10:24.760938 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/placement-db-sync-7x42z" event={"ID":"54e9c344-d7c0-4bcc-97f5-2ad0b8ee86d8","Type":"ContainerDied","Data":"144343248d2b16217cf2dcb9392f4fccabd0a94e089b16a6bacae928d2fbb414"} 
Jan 27 08:10:24 crc kubenswrapper[4787]: I0127 08:10:24.765673 4787 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/keystone-bootstrap-gzqt8" Jan 27 08:10:24 crc kubenswrapper[4787]: I0127 08:10:24.765627 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/keystone-bootstrap-gzqt8" event={"ID":"25481a95-2112-4569-8db7-38a3bc2f2137","Type":"ContainerDied","Data":"513f29482f207a5eac5aadf34dfc86089d3ef24e51546c32e97b5dc398026284"} Jan 27 08:10:24 crc kubenswrapper[4787]: I0127 08:10:24.765963 4787 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="513f29482f207a5eac5aadf34dfc86089d3ef24e51546c32e97b5dc398026284" Jan 27 08:10:24 crc kubenswrapper[4787]: I0127 08:10:24.878266 4787 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["nova-kuttl-default/keystone-bootstrap-gzqt8"] Jan 27 08:10:24 crc kubenswrapper[4787]: I0127 08:10:24.886101 4787 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["nova-kuttl-default/keystone-bootstrap-gzqt8"] Jan 27 08:10:24 crc kubenswrapper[4787]: I0127 08:10:24.968165 4787 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["nova-kuttl-default/keystone-bootstrap-bn5rd"] Jan 27 08:10:24 crc kubenswrapper[4787]: E0127 08:10:24.968641 4787 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="25481a95-2112-4569-8db7-38a3bc2f2137" containerName="keystone-bootstrap" Jan 27 08:10:24 crc kubenswrapper[4787]: I0127 08:10:24.968658 4787 state_mem.go:107] "Deleted CPUSet assignment" podUID="25481a95-2112-4569-8db7-38a3bc2f2137" containerName="keystone-bootstrap" Jan 27 08:10:24 crc kubenswrapper[4787]: I0127 08:10:24.968873 4787 memory_manager.go:354] "RemoveStaleState removing state" podUID="25481a95-2112-4569-8db7-38a3bc2f2137" containerName="keystone-bootstrap" Jan 27 08:10:24 crc kubenswrapper[4787]: I0127 08:10:24.969800 4787 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="nova-kuttl-default/keystone-bootstrap-bn5rd" Jan 27 08:10:24 crc kubenswrapper[4787]: I0127 08:10:24.972260 4787 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"keystone-scripts" Jan 27 08:10:24 crc kubenswrapper[4787]: I0127 08:10:24.972488 4787 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"keystone-config-data" Jan 27 08:10:24 crc kubenswrapper[4787]: I0127 08:10:24.972487 4787 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"keystone" Jan 27 08:10:24 crc kubenswrapper[4787]: I0127 08:10:24.978663 4787 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"osp-secret" Jan 27 08:10:24 crc kubenswrapper[4787]: I0127 08:10:24.979280 4787 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"keystone-keystone-dockercfg-z2dpt" Jan 27 08:10:24 crc kubenswrapper[4787]: I0127 08:10:24.990432 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/keystone-bootstrap-bn5rd"] Jan 27 08:10:25 crc kubenswrapper[4787]: I0127 08:10:25.089108 4787 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="25481a95-2112-4569-8db7-38a3bc2f2137" path="/var/lib/kubelet/pods/25481a95-2112-4569-8db7-38a3bc2f2137/volumes" Jan 27 08:10:25 crc kubenswrapper[4787]: I0127 08:10:25.118187 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6d885638-7008-41c1-85fc-43025469e404-combined-ca-bundle\") pod \"keystone-bootstrap-bn5rd\" (UID: \"6d885638-7008-41c1-85fc-43025469e404\") " pod="nova-kuttl-default/keystone-bootstrap-bn5rd" Jan 27 08:10:25 crc kubenswrapper[4787]: I0127 08:10:25.118253 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/6d885638-7008-41c1-85fc-43025469e404-credential-keys\") pod \"keystone-bootstrap-bn5rd\" (UID: \"6d885638-7008-41c1-85fc-43025469e404\") " pod="nova-kuttl-default/keystone-bootstrap-bn5rd" Jan 27 08:10:25 crc kubenswrapper[4787]: I0127 08:10:25.118750 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6d885638-7008-41c1-85fc-43025469e404-config-data\") pod \"keystone-bootstrap-bn5rd\" (UID: \"6d885638-7008-41c1-85fc-43025469e404\") " pod="nova-kuttl-default/keystone-bootstrap-bn5rd" Jan 27 08:10:25 crc kubenswrapper[4787]: I0127 08:10:25.118859 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vb569\" (UniqueName: \"kubernetes.io/projected/6d885638-7008-41c1-85fc-43025469e404-kube-api-access-vb569\") pod \"keystone-bootstrap-bn5rd\" (UID: \"6d885638-7008-41c1-85fc-43025469e404\") " pod="nova-kuttl-default/keystone-bootstrap-bn5rd" Jan 27 08:10:25 crc kubenswrapper[4787]: I0127 08:10:25.118897 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/6d885638-7008-41c1-85fc-43025469e404-fernet-keys\") pod \"keystone-bootstrap-bn5rd\" (UID: \"6d885638-7008-41c1-85fc-43025469e404\") " pod="nova-kuttl-default/keystone-bootstrap-bn5rd" Jan 27 08:10:25 crc kubenswrapper[4787]: I0127 08:10:25.118927 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for 
volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6d885638-7008-41c1-85fc-43025469e404-scripts\") pod \"keystone-bootstrap-bn5rd\" (UID: \"6d885638-7008-41c1-85fc-43025469e404\") " pod="nova-kuttl-default/keystone-bootstrap-bn5rd" Jan 27 08:10:25 crc kubenswrapper[4787]: I0127 08:10:25.220594 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6d885638-7008-41c1-85fc-43025469e404-config-data\") pod \"keystone-bootstrap-bn5rd\" (UID: \"6d885638-7008-41c1-85fc-43025469e404\") " pod="nova-kuttl-default/keystone-bootstrap-bn5rd" Jan 27 08:10:25 crc kubenswrapper[4787]: I0127 08:10:25.220681 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vb569\" (UniqueName: \"kubernetes.io/projected/6d885638-7008-41c1-85fc-43025469e404-kube-api-access-vb569\") pod \"keystone-bootstrap-bn5rd\" (UID: \"6d885638-7008-41c1-85fc-43025469e404\") " pod="nova-kuttl-default/keystone-bootstrap-bn5rd" Jan 27 08:10:25 crc kubenswrapper[4787]: I0127 08:10:25.220713 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/6d885638-7008-41c1-85fc-43025469e404-fernet-keys\") pod \"keystone-bootstrap-bn5rd\" (UID: \"6d885638-7008-41c1-85fc-43025469e404\") " pod="nova-kuttl-default/keystone-bootstrap-bn5rd" Jan 27 08:10:25 crc kubenswrapper[4787]: I0127 08:10:25.220748 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6d885638-7008-41c1-85fc-43025469e404-scripts\") pod \"keystone-bootstrap-bn5rd\" (UID: \"6d885638-7008-41c1-85fc-43025469e404\") " pod="nova-kuttl-default/keystone-bootstrap-bn5rd" Jan 27 08:10:25 crc kubenswrapper[4787]: I0127 08:10:25.220799 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6d885638-7008-41c1-85fc-43025469e404-combined-ca-bundle\") pod \"keystone-bootstrap-bn5rd\" (UID: \"6d885638-7008-41c1-85fc-43025469e404\") " pod="nova-kuttl-default/keystone-bootstrap-bn5rd" Jan 27 08:10:25 crc kubenswrapper[4787]: I0127 08:10:25.220825 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/6d885638-7008-41c1-85fc-43025469e404-credential-keys\") pod \"keystone-bootstrap-bn5rd\" (UID: \"6d885638-7008-41c1-85fc-43025469e404\") " pod="nova-kuttl-default/keystone-bootstrap-bn5rd" Jan 27 08:10:25 crc kubenswrapper[4787]: I0127 08:10:25.227167 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6d885638-7008-41c1-85fc-43025469e404-config-data\") pod \"keystone-bootstrap-bn5rd\" (UID: \"6d885638-7008-41c1-85fc-43025469e404\") " pod="nova-kuttl-default/keystone-bootstrap-bn5rd" Jan 27 08:10:25 crc kubenswrapper[4787]: I0127 08:10:25.227364 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6d885638-7008-41c1-85fc-43025469e404-scripts\") pod \"keystone-bootstrap-bn5rd\" (UID: \"6d885638-7008-41c1-85fc-43025469e404\") " pod="nova-kuttl-default/keystone-bootstrap-bn5rd" Jan 27 08:10:25 crc kubenswrapper[4787]: I0127 08:10:25.227642 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/6d885638-7008-41c1-85fc-43025469e404-fernet-keys\") pod 
\"keystone-bootstrap-bn5rd\" (UID: \"6d885638-7008-41c1-85fc-43025469e404\") " pod="nova-kuttl-default/keystone-bootstrap-bn5rd" Jan 27 08:10:25 crc kubenswrapper[4787]: I0127 08:10:25.228297 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/6d885638-7008-41c1-85fc-43025469e404-credential-keys\") pod \"keystone-bootstrap-bn5rd\" (UID: \"6d885638-7008-41c1-85fc-43025469e404\") " pod="nova-kuttl-default/keystone-bootstrap-bn5rd" Jan 27 08:10:25 crc kubenswrapper[4787]: I0127 08:10:25.228968 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6d885638-7008-41c1-85fc-43025469e404-combined-ca-bundle\") pod \"keystone-bootstrap-bn5rd\" (UID: \"6d885638-7008-41c1-85fc-43025469e404\") " pod="nova-kuttl-default/keystone-bootstrap-bn5rd" Jan 27 08:10:25 crc kubenswrapper[4787]: I0127 08:10:25.242505 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vb569\" (UniqueName: \"kubernetes.io/projected/6d885638-7008-41c1-85fc-43025469e404-kube-api-access-vb569\") pod \"keystone-bootstrap-bn5rd\" (UID: \"6d885638-7008-41c1-85fc-43025469e404\") " pod="nova-kuttl-default/keystone-bootstrap-bn5rd" Jan 27 08:10:25 crc kubenswrapper[4787]: I0127 08:10:25.296675 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/keystone-bootstrap-bn5rd" Jan 27 08:10:25 crc kubenswrapper[4787]: I0127 08:10:25.733700 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/keystone-bootstrap-bn5rd"] Jan 27 08:10:25 crc kubenswrapper[4787]: W0127 08:10:25.739279 4787 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6d885638_7008_41c1_85fc_43025469e404.slice/crio-1539c8370b4e55ac0c5e0d82bf5a698b7c243437a52cb95a840651309378bd7c WatchSource:0}: Error finding container 1539c8370b4e55ac0c5e0d82bf5a698b7c243437a52cb95a840651309378bd7c: Status 404 returned error can't find the container with id 1539c8370b4e55ac0c5e0d82bf5a698b7c243437a52cb95a840651309378bd7c Jan 27 08:10:25 crc kubenswrapper[4787]: I0127 08:10:25.775413 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/keystone-bootstrap-bn5rd" event={"ID":"6d885638-7008-41c1-85fc-43025469e404","Type":"ContainerStarted","Data":"1539c8370b4e55ac0c5e0d82bf5a698b7c243437a52cb95a840651309378bd7c"} Jan 27 08:10:26 crc kubenswrapper[4787]: I0127 08:10:26.091833 4787 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="nova-kuttl-default/placement-db-sync-7x42z" Jan 27 08:10:26 crc kubenswrapper[4787]: I0127 08:10:26.240913 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/54e9c344-d7c0-4bcc-97f5-2ad0b8ee86d8-config-data\") pod \"54e9c344-d7c0-4bcc-97f5-2ad0b8ee86d8\" (UID: \"54e9c344-d7c0-4bcc-97f5-2ad0b8ee86d8\") " Jan 27 08:10:26 crc kubenswrapper[4787]: I0127 08:10:26.241883 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/54e9c344-d7c0-4bcc-97f5-2ad0b8ee86d8-logs\") pod \"54e9c344-d7c0-4bcc-97f5-2ad0b8ee86d8\" (UID: \"54e9c344-d7c0-4bcc-97f5-2ad0b8ee86d8\") " Jan 27 08:10:26 crc kubenswrapper[4787]: I0127 08:10:26.242005 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/54e9c344-d7c0-4bcc-97f5-2ad0b8ee86d8-scripts\") pod \"54e9c344-d7c0-4bcc-97f5-2ad0b8ee86d8\" (UID: \"54e9c344-d7c0-4bcc-97f5-2ad0b8ee86d8\") " Jan 27 08:10:26 crc kubenswrapper[4787]: I0127 08:10:26.242356 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/54e9c344-d7c0-4bcc-97f5-2ad0b8ee86d8-combined-ca-bundle\") pod \"54e9c344-d7c0-4bcc-97f5-2ad0b8ee86d8\" (UID: \"54e9c344-d7c0-4bcc-97f5-2ad0b8ee86d8\") " Jan 27 08:10:26 crc kubenswrapper[4787]: I0127 08:10:26.242385 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5pfpx\" (UniqueName: \"kubernetes.io/projected/54e9c344-d7c0-4bcc-97f5-2ad0b8ee86d8-kube-api-access-5pfpx\") pod \"54e9c344-d7c0-4bcc-97f5-2ad0b8ee86d8\" (UID: \"54e9c344-d7c0-4bcc-97f5-2ad0b8ee86d8\") " Jan 27 08:10:26 crc kubenswrapper[4787]: I0127 08:10:26.243075 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/54e9c344-d7c0-4bcc-97f5-2ad0b8ee86d8-logs" (OuterVolumeSpecName: "logs") pod "54e9c344-d7c0-4bcc-97f5-2ad0b8ee86d8" (UID: "54e9c344-d7c0-4bcc-97f5-2ad0b8ee86d8"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 27 08:10:26 crc kubenswrapper[4787]: I0127 08:10:26.247857 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/54e9c344-d7c0-4bcc-97f5-2ad0b8ee86d8-scripts" (OuterVolumeSpecName: "scripts") pod "54e9c344-d7c0-4bcc-97f5-2ad0b8ee86d8" (UID: "54e9c344-d7c0-4bcc-97f5-2ad0b8ee86d8"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 27 08:10:26 crc kubenswrapper[4787]: I0127 08:10:26.248786 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/54e9c344-d7c0-4bcc-97f5-2ad0b8ee86d8-kube-api-access-5pfpx" (OuterVolumeSpecName: "kube-api-access-5pfpx") pod "54e9c344-d7c0-4bcc-97f5-2ad0b8ee86d8" (UID: "54e9c344-d7c0-4bcc-97f5-2ad0b8ee86d8"). InnerVolumeSpecName "kube-api-access-5pfpx". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 27 08:10:26 crc kubenswrapper[4787]: I0127 08:10:26.268918 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/54e9c344-d7c0-4bcc-97f5-2ad0b8ee86d8-config-data" (OuterVolumeSpecName: "config-data") pod "54e9c344-d7c0-4bcc-97f5-2ad0b8ee86d8" (UID: "54e9c344-d7c0-4bcc-97f5-2ad0b8ee86d8"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 27 08:10:26 crc kubenswrapper[4787]: I0127 08:10:26.271954 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/54e9c344-d7c0-4bcc-97f5-2ad0b8ee86d8-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "54e9c344-d7c0-4bcc-97f5-2ad0b8ee86d8" (UID: "54e9c344-d7c0-4bcc-97f5-2ad0b8ee86d8"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 27 08:10:26 crc kubenswrapper[4787]: I0127 08:10:26.344535 4787 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/54e9c344-d7c0-4bcc-97f5-2ad0b8ee86d8-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 27 08:10:26 crc kubenswrapper[4787]: I0127 08:10:26.344634 4787 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5pfpx\" (UniqueName: \"kubernetes.io/projected/54e9c344-d7c0-4bcc-97f5-2ad0b8ee86d8-kube-api-access-5pfpx\") on node \"crc\" DevicePath \"\"" Jan 27 08:10:26 crc kubenswrapper[4787]: I0127 08:10:26.344667 4787 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/54e9c344-d7c0-4bcc-97f5-2ad0b8ee86d8-config-data\") on node \"crc\" DevicePath \"\"" Jan 27 08:10:26 crc kubenswrapper[4787]: I0127 08:10:26.344694 4787 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/54e9c344-d7c0-4bcc-97f5-2ad0b8ee86d8-logs\") on node \"crc\" DevicePath \"\"" Jan 27 08:10:26 crc kubenswrapper[4787]: I0127 08:10:26.344716 4787 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/54e9c344-d7c0-4bcc-97f5-2ad0b8ee86d8-scripts\") on node \"crc\" DevicePath \"\"" Jan 27 08:10:26 crc kubenswrapper[4787]: I0127 08:10:26.801841 4787 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="nova-kuttl-default/placement-db-sync-7x42z" Jan 27 08:10:26 crc kubenswrapper[4787]: I0127 08:10:26.801883 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/placement-db-sync-7x42z" event={"ID":"54e9c344-d7c0-4bcc-97f5-2ad0b8ee86d8","Type":"ContainerDied","Data":"872a1d52e4e7762f61f87cf9f200fc0c3aae697c3f41a540eb1bc95d926012b6"} Jan 27 08:10:26 crc kubenswrapper[4787]: I0127 08:10:26.802684 4787 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="872a1d52e4e7762f61f87cf9f200fc0c3aae697c3f41a540eb1bc95d926012b6" Jan 27 08:10:26 crc kubenswrapper[4787]: I0127 08:10:26.808050 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/keystone-bootstrap-bn5rd" event={"ID":"6d885638-7008-41c1-85fc-43025469e404","Type":"ContainerStarted","Data":"dcf70ae1779bf3b7d74781efddb0114399b6ad2886db99535b6b73b54d7d6a8a"} Jan 27 08:10:26 crc kubenswrapper[4787]: I0127 08:10:26.849616 4787 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="nova-kuttl-default/keystone-bootstrap-bn5rd" podStartSLOduration=2.849593424 podStartE2EDuration="2.849593424s" podCreationTimestamp="2026-01-27 08:10:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-27 08:10:26.848115918 +0000 UTC m=+1132.500471430" watchObservedRunningTime="2026-01-27 08:10:26.849593424 +0000 UTC m=+1132.501948926" Jan 27 08:10:26 crc kubenswrapper[4787]: I0127 08:10:26.919211 4787 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["nova-kuttl-default/placement-545ff75684-kqpgf"] Jan 27 08:10:26 crc kubenswrapper[4787]: E0127 08:10:26.919722 4787 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="54e9c344-d7c0-4bcc-97f5-2ad0b8ee86d8" containerName="placement-db-sync" Jan 27 08:10:26 crc kubenswrapper[4787]: I0127 08:10:26.919746 4787 state_mem.go:107] "Deleted CPUSet assignment" podUID="54e9c344-d7c0-4bcc-97f5-2ad0b8ee86d8" containerName="placement-db-sync" Jan 27 08:10:26 crc kubenswrapper[4787]: I0127 08:10:26.922650 4787 memory_manager.go:354] "RemoveStaleState removing state" podUID="54e9c344-d7c0-4bcc-97f5-2ad0b8ee86d8" containerName="placement-db-sync" Jan 27 08:10:26 crc kubenswrapper[4787]: I0127 08:10:26.924156 4787 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="nova-kuttl-default/placement-545ff75684-kqpgf" Jan 27 08:10:26 crc kubenswrapper[4787]: I0127 08:10:26.930226 4787 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"placement-scripts" Jan 27 08:10:26 crc kubenswrapper[4787]: I0127 08:10:26.930765 4787 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"placement-config-data" Jan 27 08:10:26 crc kubenswrapper[4787]: I0127 08:10:26.932119 4787 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"placement-placement-dockercfg-7wn8v" Jan 27 08:10:26 crc kubenswrapper[4787]: I0127 08:10:26.936847 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/placement-545ff75684-kqpgf"] Jan 27 08:10:27 crc kubenswrapper[4787]: I0127 08:10:27.056546 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p4kkh\" (UniqueName: \"kubernetes.io/projected/7d647302-7ff9-465d-b5a5-a3a76f35df28-kube-api-access-p4kkh\") pod \"placement-545ff75684-kqpgf\" (UID: \"7d647302-7ff9-465d-b5a5-a3a76f35df28\") " pod="nova-kuttl-default/placement-545ff75684-kqpgf" Jan 27 08:10:27 crc kubenswrapper[4787]: I0127 08:10:27.056986 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7d647302-7ff9-465d-b5a5-a3a76f35df28-logs\") pod \"placement-545ff75684-kqpgf\" (UID: \"7d647302-7ff9-465d-b5a5-a3a76f35df28\") " pod="nova-kuttl-default/placement-545ff75684-kqpgf" Jan 27 08:10:27 crc kubenswrapper[4787]: I0127 08:10:27.057212 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7d647302-7ff9-465d-b5a5-a3a76f35df28-scripts\") pod \"placement-545ff75684-kqpgf\" (UID: \"7d647302-7ff9-465d-b5a5-a3a76f35df28\") " pod="nova-kuttl-default/placement-545ff75684-kqpgf" Jan 27 08:10:27 crc kubenswrapper[4787]: I0127 08:10:27.057309 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7d647302-7ff9-465d-b5a5-a3a76f35df28-combined-ca-bundle\") pod \"placement-545ff75684-kqpgf\" (UID: \"7d647302-7ff9-465d-b5a5-a3a76f35df28\") " pod="nova-kuttl-default/placement-545ff75684-kqpgf" Jan 27 08:10:27 crc kubenswrapper[4787]: I0127 08:10:27.057418 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7d647302-7ff9-465d-b5a5-a3a76f35df28-config-data\") pod \"placement-545ff75684-kqpgf\" (UID: \"7d647302-7ff9-465d-b5a5-a3a76f35df28\") " pod="nova-kuttl-default/placement-545ff75684-kqpgf" Jan 27 08:10:27 crc kubenswrapper[4787]: I0127 08:10:27.159633 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7d647302-7ff9-465d-b5a5-a3a76f35df28-config-data\") pod \"placement-545ff75684-kqpgf\" (UID: \"7d647302-7ff9-465d-b5a5-a3a76f35df28\") " pod="nova-kuttl-default/placement-545ff75684-kqpgf" Jan 27 08:10:27 crc kubenswrapper[4787]: I0127 08:10:27.159797 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p4kkh\" (UniqueName: \"kubernetes.io/projected/7d647302-7ff9-465d-b5a5-a3a76f35df28-kube-api-access-p4kkh\") pod \"placement-545ff75684-kqpgf\" (UID: 
\"7d647302-7ff9-465d-b5a5-a3a76f35df28\") " pod="nova-kuttl-default/placement-545ff75684-kqpgf" Jan 27 08:10:27 crc kubenswrapper[4787]: I0127 08:10:27.159902 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7d647302-7ff9-465d-b5a5-a3a76f35df28-logs\") pod \"placement-545ff75684-kqpgf\" (UID: \"7d647302-7ff9-465d-b5a5-a3a76f35df28\") " pod="nova-kuttl-default/placement-545ff75684-kqpgf" Jan 27 08:10:27 crc kubenswrapper[4787]: I0127 08:10:27.160059 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7d647302-7ff9-465d-b5a5-a3a76f35df28-scripts\") pod \"placement-545ff75684-kqpgf\" (UID: \"7d647302-7ff9-465d-b5a5-a3a76f35df28\") " pod="nova-kuttl-default/placement-545ff75684-kqpgf" Jan 27 08:10:27 crc kubenswrapper[4787]: I0127 08:10:27.160422 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7d647302-7ff9-465d-b5a5-a3a76f35df28-combined-ca-bundle\") pod \"placement-545ff75684-kqpgf\" (UID: \"7d647302-7ff9-465d-b5a5-a3a76f35df28\") " pod="nova-kuttl-default/placement-545ff75684-kqpgf" Jan 27 08:10:27 crc kubenswrapper[4787]: I0127 08:10:27.164196 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7d647302-7ff9-465d-b5a5-a3a76f35df28-logs\") pod \"placement-545ff75684-kqpgf\" (UID: \"7d647302-7ff9-465d-b5a5-a3a76f35df28\") " pod="nova-kuttl-default/placement-545ff75684-kqpgf" Jan 27 08:10:27 crc kubenswrapper[4787]: I0127 08:10:27.169177 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7d647302-7ff9-465d-b5a5-a3a76f35df28-scripts\") pod \"placement-545ff75684-kqpgf\" (UID: \"7d647302-7ff9-465d-b5a5-a3a76f35df28\") " pod="nova-kuttl-default/placement-545ff75684-kqpgf" Jan 27 08:10:27 crc kubenswrapper[4787]: I0127 08:10:27.179435 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7d647302-7ff9-465d-b5a5-a3a76f35df28-config-data\") pod \"placement-545ff75684-kqpgf\" (UID: \"7d647302-7ff9-465d-b5a5-a3a76f35df28\") " pod="nova-kuttl-default/placement-545ff75684-kqpgf" Jan 27 08:10:27 crc kubenswrapper[4787]: I0127 08:10:27.179910 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7d647302-7ff9-465d-b5a5-a3a76f35df28-combined-ca-bundle\") pod \"placement-545ff75684-kqpgf\" (UID: \"7d647302-7ff9-465d-b5a5-a3a76f35df28\") " pod="nova-kuttl-default/placement-545ff75684-kqpgf" Jan 27 08:10:27 crc kubenswrapper[4787]: I0127 08:10:27.191436 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p4kkh\" (UniqueName: \"kubernetes.io/projected/7d647302-7ff9-465d-b5a5-a3a76f35df28-kube-api-access-p4kkh\") pod \"placement-545ff75684-kqpgf\" (UID: \"7d647302-7ff9-465d-b5a5-a3a76f35df28\") " pod="nova-kuttl-default/placement-545ff75684-kqpgf" Jan 27 08:10:27 crc kubenswrapper[4787]: I0127 08:10:27.245295 4787 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="nova-kuttl-default/placement-545ff75684-kqpgf" Jan 27 08:10:27 crc kubenswrapper[4787]: I0127 08:10:27.768125 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/placement-545ff75684-kqpgf"] Jan 27 08:10:27 crc kubenswrapper[4787]: I0127 08:10:27.817302 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/placement-545ff75684-kqpgf" event={"ID":"7d647302-7ff9-465d-b5a5-a3a76f35df28","Type":"ContainerStarted","Data":"2e17e25cf11e8b834817e98069cc4bdbc698fede2ca94b56564f6522ff9488f3"} Jan 27 08:10:30 crc kubenswrapper[4787]: I0127 08:10:30.843226 4787 generic.go:334] "Generic (PLEG): container finished" podID="6d885638-7008-41c1-85fc-43025469e404" containerID="dcf70ae1779bf3b7d74781efddb0114399b6ad2886db99535b6b73b54d7d6a8a" exitCode=0 Jan 27 08:10:30 crc kubenswrapper[4787]: I0127 08:10:30.843380 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/keystone-bootstrap-bn5rd" event={"ID":"6d885638-7008-41c1-85fc-43025469e404","Type":"ContainerDied","Data":"dcf70ae1779bf3b7d74781efddb0114399b6ad2886db99535b6b73b54d7d6a8a"} Jan 27 08:10:30 crc kubenswrapper[4787]: I0127 08:10:30.849787 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/placement-545ff75684-kqpgf" event={"ID":"7d647302-7ff9-465d-b5a5-a3a76f35df28","Type":"ContainerStarted","Data":"561fd098a927e0f1c0e5e6d9d248dfa5eaae4828ebf0342272a35906a29f1571"} Jan 27 08:10:30 crc kubenswrapper[4787]: I0127 08:10:30.849957 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/placement-545ff75684-kqpgf" event={"ID":"7d647302-7ff9-465d-b5a5-a3a76f35df28","Type":"ContainerStarted","Data":"bb3404cc80cba5655f9e2ee8dc067fd50f8c9cd1dd1a18135ebb1ca4dfe6afa9"} Jan 27 08:10:30 crc kubenswrapper[4787]: I0127 08:10:30.851430 4787 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="nova-kuttl-default/placement-545ff75684-kqpgf" Jan 27 08:10:30 crc kubenswrapper[4787]: I0127 08:10:30.851584 4787 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="nova-kuttl-default/placement-545ff75684-kqpgf" Jan 27 08:10:30 crc kubenswrapper[4787]: I0127 08:10:30.904824 4787 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="nova-kuttl-default/placement-545ff75684-kqpgf" podStartSLOduration=4.904799191 podStartE2EDuration="4.904799191s" podCreationTimestamp="2026-01-27 08:10:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-27 08:10:30.903382116 +0000 UTC m=+1136.555737678" watchObservedRunningTime="2026-01-27 08:10:30.904799191 +0000 UTC m=+1136.557154703" Jan 27 08:10:32 crc kubenswrapper[4787]: I0127 08:10:32.220868 4787 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="nova-kuttl-default/keystone-bootstrap-bn5rd" Jan 27 08:10:32 crc kubenswrapper[4787]: I0127 08:10:32.276094 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/6d885638-7008-41c1-85fc-43025469e404-fernet-keys\") pod \"6d885638-7008-41c1-85fc-43025469e404\" (UID: \"6d885638-7008-41c1-85fc-43025469e404\") " Jan 27 08:10:32 crc kubenswrapper[4787]: I0127 08:10:32.276184 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6d885638-7008-41c1-85fc-43025469e404-combined-ca-bundle\") pod \"6d885638-7008-41c1-85fc-43025469e404\" (UID: \"6d885638-7008-41c1-85fc-43025469e404\") " Jan 27 08:10:32 crc kubenswrapper[4787]: I0127 08:10:32.276342 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vb569\" (UniqueName: \"kubernetes.io/projected/6d885638-7008-41c1-85fc-43025469e404-kube-api-access-vb569\") pod \"6d885638-7008-41c1-85fc-43025469e404\" (UID: \"6d885638-7008-41c1-85fc-43025469e404\") " Jan 27 08:10:32 crc kubenswrapper[4787]: I0127 08:10:32.276397 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6d885638-7008-41c1-85fc-43025469e404-scripts\") pod \"6d885638-7008-41c1-85fc-43025469e404\" (UID: \"6d885638-7008-41c1-85fc-43025469e404\") " Jan 27 08:10:32 crc kubenswrapper[4787]: I0127 08:10:32.276416 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6d885638-7008-41c1-85fc-43025469e404-config-data\") pod \"6d885638-7008-41c1-85fc-43025469e404\" (UID: \"6d885638-7008-41c1-85fc-43025469e404\") " Jan 27 08:10:32 crc kubenswrapper[4787]: I0127 08:10:32.276476 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/6d885638-7008-41c1-85fc-43025469e404-credential-keys\") pod \"6d885638-7008-41c1-85fc-43025469e404\" (UID: \"6d885638-7008-41c1-85fc-43025469e404\") " Jan 27 08:10:32 crc kubenswrapper[4787]: I0127 08:10:32.283620 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6d885638-7008-41c1-85fc-43025469e404-kube-api-access-vb569" (OuterVolumeSpecName: "kube-api-access-vb569") pod "6d885638-7008-41c1-85fc-43025469e404" (UID: "6d885638-7008-41c1-85fc-43025469e404"). InnerVolumeSpecName "kube-api-access-vb569". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 27 08:10:32 crc kubenswrapper[4787]: I0127 08:10:32.283681 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6d885638-7008-41c1-85fc-43025469e404-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "6d885638-7008-41c1-85fc-43025469e404" (UID: "6d885638-7008-41c1-85fc-43025469e404"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 27 08:10:32 crc kubenswrapper[4787]: I0127 08:10:32.283828 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6d885638-7008-41c1-85fc-43025469e404-scripts" (OuterVolumeSpecName: "scripts") pod "6d885638-7008-41c1-85fc-43025469e404" (UID: "6d885638-7008-41c1-85fc-43025469e404"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 27 08:10:32 crc kubenswrapper[4787]: I0127 08:10:32.287502 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6d885638-7008-41c1-85fc-43025469e404-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "6d885638-7008-41c1-85fc-43025469e404" (UID: "6d885638-7008-41c1-85fc-43025469e404"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 27 08:10:32 crc kubenswrapper[4787]: I0127 08:10:32.306229 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6d885638-7008-41c1-85fc-43025469e404-config-data" (OuterVolumeSpecName: "config-data") pod "6d885638-7008-41c1-85fc-43025469e404" (UID: "6d885638-7008-41c1-85fc-43025469e404"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 27 08:10:32 crc kubenswrapper[4787]: I0127 08:10:32.306735 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6d885638-7008-41c1-85fc-43025469e404-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "6d885638-7008-41c1-85fc-43025469e404" (UID: "6d885638-7008-41c1-85fc-43025469e404"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 27 08:10:32 crc kubenswrapper[4787]: I0127 08:10:32.378470 4787 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vb569\" (UniqueName: \"kubernetes.io/projected/6d885638-7008-41c1-85fc-43025469e404-kube-api-access-vb569\") on node \"crc\" DevicePath \"\"" Jan 27 08:10:32 crc kubenswrapper[4787]: I0127 08:10:32.378507 4787 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6d885638-7008-41c1-85fc-43025469e404-scripts\") on node \"crc\" DevicePath \"\"" Jan 27 08:10:32 crc kubenswrapper[4787]: I0127 08:10:32.378516 4787 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6d885638-7008-41c1-85fc-43025469e404-config-data\") on node \"crc\" DevicePath \"\"" Jan 27 08:10:32 crc kubenswrapper[4787]: I0127 08:10:32.378525 4787 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/6d885638-7008-41c1-85fc-43025469e404-credential-keys\") on node \"crc\" DevicePath \"\"" Jan 27 08:10:32 crc kubenswrapper[4787]: I0127 08:10:32.378534 4787 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/6d885638-7008-41c1-85fc-43025469e404-fernet-keys\") on node \"crc\" DevicePath \"\"" Jan 27 08:10:32 crc kubenswrapper[4787]: I0127 08:10:32.378542 4787 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6d885638-7008-41c1-85fc-43025469e404-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 27 08:10:32 crc kubenswrapper[4787]: I0127 08:10:32.869951 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/keystone-bootstrap-bn5rd" event={"ID":"6d885638-7008-41c1-85fc-43025469e404","Type":"ContainerDied","Data":"1539c8370b4e55ac0c5e0d82bf5a698b7c243437a52cb95a840651309378bd7c"} Jan 27 08:10:32 crc kubenswrapper[4787]: I0127 08:10:32.870401 4787 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1539c8370b4e55ac0c5e0d82bf5a698b7c243437a52cb95a840651309378bd7c" Jan 27 08:10:32 crc kubenswrapper[4787]: I0127 08:10:32.870008 4787 
util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/keystone-bootstrap-bn5rd" Jan 27 08:10:32 crc kubenswrapper[4787]: I0127 08:10:32.963835 4787 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["nova-kuttl-default/keystone-678dff9ff-k6jwl"] Jan 27 08:10:32 crc kubenswrapper[4787]: E0127 08:10:32.964216 4787 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6d885638-7008-41c1-85fc-43025469e404" containerName="keystone-bootstrap" Jan 27 08:10:32 crc kubenswrapper[4787]: I0127 08:10:32.964231 4787 state_mem.go:107] "Deleted CPUSet assignment" podUID="6d885638-7008-41c1-85fc-43025469e404" containerName="keystone-bootstrap" Jan 27 08:10:32 crc kubenswrapper[4787]: I0127 08:10:32.964406 4787 memory_manager.go:354] "RemoveStaleState removing state" podUID="6d885638-7008-41c1-85fc-43025469e404" containerName="keystone-bootstrap" Jan 27 08:10:32 crc kubenswrapper[4787]: I0127 08:10:32.965066 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/keystone-678dff9ff-k6jwl" Jan 27 08:10:32 crc kubenswrapper[4787]: I0127 08:10:32.967760 4787 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"keystone-keystone-dockercfg-z2dpt" Jan 27 08:10:32 crc kubenswrapper[4787]: I0127 08:10:32.971268 4787 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"keystone" Jan 27 08:10:32 crc kubenswrapper[4787]: I0127 08:10:32.974536 4787 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"keystone-config-data" Jan 27 08:10:32 crc kubenswrapper[4787]: I0127 08:10:32.979300 4787 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"keystone-scripts" Jan 27 08:10:32 crc kubenswrapper[4787]: I0127 08:10:32.982851 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/keystone-678dff9ff-k6jwl"] Jan 27 08:10:33 crc kubenswrapper[4787]: I0127 08:10:33.092141 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-llzkr\" (UniqueName: \"kubernetes.io/projected/91494b32-1284-49fb-b548-3fbc5f1e1ddf-kube-api-access-llzkr\") pod \"keystone-678dff9ff-k6jwl\" (UID: \"91494b32-1284-49fb-b548-3fbc5f1e1ddf\") " pod="nova-kuttl-default/keystone-678dff9ff-k6jwl" Jan 27 08:10:33 crc kubenswrapper[4787]: I0127 08:10:33.092198 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/91494b32-1284-49fb-b548-3fbc5f1e1ddf-credential-keys\") pod \"keystone-678dff9ff-k6jwl\" (UID: \"91494b32-1284-49fb-b548-3fbc5f1e1ddf\") " pod="nova-kuttl-default/keystone-678dff9ff-k6jwl" Jan 27 08:10:33 crc kubenswrapper[4787]: I0127 08:10:33.092390 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/91494b32-1284-49fb-b548-3fbc5f1e1ddf-combined-ca-bundle\") pod \"keystone-678dff9ff-k6jwl\" (UID: \"91494b32-1284-49fb-b548-3fbc5f1e1ddf\") " pod="nova-kuttl-default/keystone-678dff9ff-k6jwl" Jan 27 08:10:33 crc kubenswrapper[4787]: I0127 08:10:33.092756 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/91494b32-1284-49fb-b548-3fbc5f1e1ddf-config-data\") pod \"keystone-678dff9ff-k6jwl\" (UID: \"91494b32-1284-49fb-b548-3fbc5f1e1ddf\") " 
pod="nova-kuttl-default/keystone-678dff9ff-k6jwl" Jan 27 08:10:33 crc kubenswrapper[4787]: I0127 08:10:33.092920 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/91494b32-1284-49fb-b548-3fbc5f1e1ddf-fernet-keys\") pod \"keystone-678dff9ff-k6jwl\" (UID: \"91494b32-1284-49fb-b548-3fbc5f1e1ddf\") " pod="nova-kuttl-default/keystone-678dff9ff-k6jwl" Jan 27 08:10:33 crc kubenswrapper[4787]: I0127 08:10:33.093104 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/91494b32-1284-49fb-b548-3fbc5f1e1ddf-scripts\") pod \"keystone-678dff9ff-k6jwl\" (UID: \"91494b32-1284-49fb-b548-3fbc5f1e1ddf\") " pod="nova-kuttl-default/keystone-678dff9ff-k6jwl" Jan 27 08:10:33 crc kubenswrapper[4787]: I0127 08:10:33.194593 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-llzkr\" (UniqueName: \"kubernetes.io/projected/91494b32-1284-49fb-b548-3fbc5f1e1ddf-kube-api-access-llzkr\") pod \"keystone-678dff9ff-k6jwl\" (UID: \"91494b32-1284-49fb-b548-3fbc5f1e1ddf\") " pod="nova-kuttl-default/keystone-678dff9ff-k6jwl" Jan 27 08:10:33 crc kubenswrapper[4787]: I0127 08:10:33.194644 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/91494b32-1284-49fb-b548-3fbc5f1e1ddf-credential-keys\") pod \"keystone-678dff9ff-k6jwl\" (UID: \"91494b32-1284-49fb-b548-3fbc5f1e1ddf\") " pod="nova-kuttl-default/keystone-678dff9ff-k6jwl" Jan 27 08:10:33 crc kubenswrapper[4787]: I0127 08:10:33.194685 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/91494b32-1284-49fb-b548-3fbc5f1e1ddf-combined-ca-bundle\") pod \"keystone-678dff9ff-k6jwl\" (UID: \"91494b32-1284-49fb-b548-3fbc5f1e1ddf\") " pod="nova-kuttl-default/keystone-678dff9ff-k6jwl" Jan 27 08:10:33 crc kubenswrapper[4787]: I0127 08:10:33.194752 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/91494b32-1284-49fb-b548-3fbc5f1e1ddf-config-data\") pod \"keystone-678dff9ff-k6jwl\" (UID: \"91494b32-1284-49fb-b548-3fbc5f1e1ddf\") " pod="nova-kuttl-default/keystone-678dff9ff-k6jwl" Jan 27 08:10:33 crc kubenswrapper[4787]: I0127 08:10:33.194789 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/91494b32-1284-49fb-b548-3fbc5f1e1ddf-fernet-keys\") pod \"keystone-678dff9ff-k6jwl\" (UID: \"91494b32-1284-49fb-b548-3fbc5f1e1ddf\") " pod="nova-kuttl-default/keystone-678dff9ff-k6jwl" Jan 27 08:10:33 crc kubenswrapper[4787]: I0127 08:10:33.194824 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/91494b32-1284-49fb-b548-3fbc5f1e1ddf-scripts\") pod \"keystone-678dff9ff-k6jwl\" (UID: \"91494b32-1284-49fb-b548-3fbc5f1e1ddf\") " pod="nova-kuttl-default/keystone-678dff9ff-k6jwl" Jan 27 08:10:33 crc kubenswrapper[4787]: I0127 08:10:33.200608 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/91494b32-1284-49fb-b548-3fbc5f1e1ddf-scripts\") pod \"keystone-678dff9ff-k6jwl\" (UID: \"91494b32-1284-49fb-b548-3fbc5f1e1ddf\") " pod="nova-kuttl-default/keystone-678dff9ff-k6jwl" Jan 27 08:10:33 crc 
kubenswrapper[4787]: I0127 08:10:33.202736 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/91494b32-1284-49fb-b548-3fbc5f1e1ddf-config-data\") pod \"keystone-678dff9ff-k6jwl\" (UID: \"91494b32-1284-49fb-b548-3fbc5f1e1ddf\") " pod="nova-kuttl-default/keystone-678dff9ff-k6jwl" Jan 27 08:10:33 crc kubenswrapper[4787]: I0127 08:10:33.207292 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/91494b32-1284-49fb-b548-3fbc5f1e1ddf-credential-keys\") pod \"keystone-678dff9ff-k6jwl\" (UID: \"91494b32-1284-49fb-b548-3fbc5f1e1ddf\") " pod="nova-kuttl-default/keystone-678dff9ff-k6jwl" Jan 27 08:10:33 crc kubenswrapper[4787]: I0127 08:10:33.207917 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/91494b32-1284-49fb-b548-3fbc5f1e1ddf-combined-ca-bundle\") pod \"keystone-678dff9ff-k6jwl\" (UID: \"91494b32-1284-49fb-b548-3fbc5f1e1ddf\") " pod="nova-kuttl-default/keystone-678dff9ff-k6jwl" Jan 27 08:10:33 crc kubenswrapper[4787]: I0127 08:10:33.214323 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/91494b32-1284-49fb-b548-3fbc5f1e1ddf-fernet-keys\") pod \"keystone-678dff9ff-k6jwl\" (UID: \"91494b32-1284-49fb-b548-3fbc5f1e1ddf\") " pod="nova-kuttl-default/keystone-678dff9ff-k6jwl" Jan 27 08:10:33 crc kubenswrapper[4787]: I0127 08:10:33.263053 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-llzkr\" (UniqueName: \"kubernetes.io/projected/91494b32-1284-49fb-b548-3fbc5f1e1ddf-kube-api-access-llzkr\") pod \"keystone-678dff9ff-k6jwl\" (UID: \"91494b32-1284-49fb-b548-3fbc5f1e1ddf\") " pod="nova-kuttl-default/keystone-678dff9ff-k6jwl" Jan 27 08:10:33 crc kubenswrapper[4787]: I0127 08:10:33.284981 4787 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="nova-kuttl-default/keystone-678dff9ff-k6jwl" Jan 27 08:10:33 crc kubenswrapper[4787]: I0127 08:10:33.768076 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/keystone-678dff9ff-k6jwl"] Jan 27 08:10:33 crc kubenswrapper[4787]: I0127 08:10:33.880849 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/keystone-678dff9ff-k6jwl" event={"ID":"91494b32-1284-49fb-b548-3fbc5f1e1ddf","Type":"ContainerStarted","Data":"fe183bddc005b46d8e0a08fe43c497acbbbe6d36f6f9ffe76f7d65ae4c116664"} Jan 27 08:10:34 crc kubenswrapper[4787]: I0127 08:10:34.890939 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/keystone-678dff9ff-k6jwl" event={"ID":"91494b32-1284-49fb-b548-3fbc5f1e1ddf","Type":"ContainerStarted","Data":"e517dd65d86b2e96567d28e4ec1b91bea1fbb099c07e3b575ef26f54f268fc51"} Jan 27 08:10:34 crc kubenswrapper[4787]: I0127 08:10:34.891076 4787 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="nova-kuttl-default/keystone-678dff9ff-k6jwl" Jan 27 08:10:34 crc kubenswrapper[4787]: I0127 08:10:34.928902 4787 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="nova-kuttl-default/keystone-678dff9ff-k6jwl" podStartSLOduration=2.9288756830000002 podStartE2EDuration="2.928875683s" podCreationTimestamp="2026-01-27 08:10:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-27 08:10:34.916064818 +0000 UTC m=+1140.568420320" watchObservedRunningTime="2026-01-27 08:10:34.928875683 +0000 UTC m=+1140.581231195" Jan 27 08:10:58 crc kubenswrapper[4787]: I0127 08:10:58.373019 4787 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="nova-kuttl-default/placement-545ff75684-kqpgf" Jan 27 08:10:58 crc kubenswrapper[4787]: I0127 08:10:58.375070 4787 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="nova-kuttl-default/placement-545ff75684-kqpgf" Jan 27 08:11:04 crc kubenswrapper[4787]: I0127 08:11:04.899595 4787 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="nova-kuttl-default/keystone-678dff9ff-k6jwl" Jan 27 08:11:09 crc kubenswrapper[4787]: I0127 08:11:09.413112 4787 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["nova-kuttl-default/openstackclient"] Jan 27 08:11:09 crc kubenswrapper[4787]: I0127 08:11:09.414970 4787 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="nova-kuttl-default/openstackclient" Jan 27 08:11:09 crc kubenswrapper[4787]: I0127 08:11:09.418695 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"nova-kuttl-default"/"openstack-config" Jan 27 08:11:09 crc kubenswrapper[4787]: I0127 08:11:09.419085 4787 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"openstack-config-secret" Jan 27 08:11:09 crc kubenswrapper[4787]: I0127 08:11:09.419430 4787 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"openstackclient-openstackclient-dockercfg-bg2tl" Jan 27 08:11:09 crc kubenswrapper[4787]: I0127 08:11:09.426802 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/openstackclient"] Jan 27 08:11:09 crc kubenswrapper[4787]: I0127 08:11:09.601676 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/4826cdba-006d-447f-a206-367bd1dc8893-openstack-config-secret\") pod \"openstackclient\" (UID: \"4826cdba-006d-447f-a206-367bd1dc8893\") " pod="nova-kuttl-default/openstackclient" Jan 27 08:11:09 crc kubenswrapper[4787]: I0127 08:11:09.601892 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jkggv\" (UniqueName: \"kubernetes.io/projected/4826cdba-006d-447f-a206-367bd1dc8893-kube-api-access-jkggv\") pod \"openstackclient\" (UID: \"4826cdba-006d-447f-a206-367bd1dc8893\") " pod="nova-kuttl-default/openstackclient" Jan 27 08:11:09 crc kubenswrapper[4787]: I0127 08:11:09.602295 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4826cdba-006d-447f-a206-367bd1dc8893-combined-ca-bundle\") pod \"openstackclient\" (UID: \"4826cdba-006d-447f-a206-367bd1dc8893\") " pod="nova-kuttl-default/openstackclient" Jan 27 08:11:09 crc kubenswrapper[4787]: I0127 08:11:09.602354 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/4826cdba-006d-447f-a206-367bd1dc8893-openstack-config\") pod \"openstackclient\" (UID: \"4826cdba-006d-447f-a206-367bd1dc8893\") " pod="nova-kuttl-default/openstackclient" Jan 27 08:11:09 crc kubenswrapper[4787]: I0127 08:11:09.703909 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4826cdba-006d-447f-a206-367bd1dc8893-combined-ca-bundle\") pod \"openstackclient\" (UID: \"4826cdba-006d-447f-a206-367bd1dc8893\") " pod="nova-kuttl-default/openstackclient" Jan 27 08:11:09 crc kubenswrapper[4787]: I0127 08:11:09.703958 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/4826cdba-006d-447f-a206-367bd1dc8893-openstack-config\") pod \"openstackclient\" (UID: \"4826cdba-006d-447f-a206-367bd1dc8893\") " pod="nova-kuttl-default/openstackclient" Jan 27 08:11:09 crc kubenswrapper[4787]: I0127 08:11:09.704009 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/4826cdba-006d-447f-a206-367bd1dc8893-openstack-config-secret\") pod \"openstackclient\" (UID: \"4826cdba-006d-447f-a206-367bd1dc8893\") " pod="nova-kuttl-default/openstackclient" Jan 27 08:11:09 crc 
kubenswrapper[4787]: I0127 08:11:09.704043 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jkggv\" (UniqueName: \"kubernetes.io/projected/4826cdba-006d-447f-a206-367bd1dc8893-kube-api-access-jkggv\") pod \"openstackclient\" (UID: \"4826cdba-006d-447f-a206-367bd1dc8893\") " pod="nova-kuttl-default/openstackclient" Jan 27 08:11:09 crc kubenswrapper[4787]: I0127 08:11:09.706051 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/4826cdba-006d-447f-a206-367bd1dc8893-openstack-config\") pod \"openstackclient\" (UID: \"4826cdba-006d-447f-a206-367bd1dc8893\") " pod="nova-kuttl-default/openstackclient" Jan 27 08:11:09 crc kubenswrapper[4787]: I0127 08:11:09.711701 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/4826cdba-006d-447f-a206-367bd1dc8893-openstack-config-secret\") pod \"openstackclient\" (UID: \"4826cdba-006d-447f-a206-367bd1dc8893\") " pod="nova-kuttl-default/openstackclient" Jan 27 08:11:09 crc kubenswrapper[4787]: I0127 08:11:09.718296 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4826cdba-006d-447f-a206-367bd1dc8893-combined-ca-bundle\") pod \"openstackclient\" (UID: \"4826cdba-006d-447f-a206-367bd1dc8893\") " pod="nova-kuttl-default/openstackclient" Jan 27 08:11:09 crc kubenswrapper[4787]: I0127 08:11:09.720649 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jkggv\" (UniqueName: \"kubernetes.io/projected/4826cdba-006d-447f-a206-367bd1dc8893-kube-api-access-jkggv\") pod \"openstackclient\" (UID: \"4826cdba-006d-447f-a206-367bd1dc8893\") " pod="nova-kuttl-default/openstackclient" Jan 27 08:11:09 crc kubenswrapper[4787]: I0127 08:11:09.740203 4787 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="nova-kuttl-default/openstackclient" Jan 27 08:11:10 crc kubenswrapper[4787]: I0127 08:11:10.216937 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/openstackclient"] Jan 27 08:11:11 crc kubenswrapper[4787]: I0127 08:11:11.208631 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/openstackclient" event={"ID":"4826cdba-006d-447f-a206-367bd1dc8893","Type":"ContainerStarted","Data":"a77022d652bfe8800a86ef8cf777ec9be7b6f1f6b19e884420ba2176df40c740"} Jan 27 08:11:18 crc kubenswrapper[4787]: I0127 08:11:18.268943 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/openstackclient" event={"ID":"4826cdba-006d-447f-a206-367bd1dc8893","Type":"ContainerStarted","Data":"700a782a32d14e996c4275c291752160fe2343396aef23102a9c39a0782e7024"} Jan 27 08:11:18 crc kubenswrapper[4787]: I0127 08:11:18.284985 4787 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="nova-kuttl-default/openstackclient" podStartSLOduration=2.00080998 podStartE2EDuration="9.284965773s" podCreationTimestamp="2026-01-27 08:11:09 +0000 UTC" firstStartedPulling="2026-01-27 08:11:10.225232852 +0000 UTC m=+1175.877588344" lastFinishedPulling="2026-01-27 08:11:17.509388605 +0000 UTC m=+1183.161744137" observedRunningTime="2026-01-27 08:11:18.283869916 +0000 UTC m=+1183.936225408" watchObservedRunningTime="2026-01-27 08:11:18.284965773 +0000 UTC m=+1183.937321265" Jan 27 08:11:27 crc kubenswrapper[4787]: I0127 08:11:27.404855 4787 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/nova-operator-controller-manager-57d6b69d8b-s8kvs"] Jan 27 08:11:27 crc kubenswrapper[4787]: I0127 08:11:27.405809 4787 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/nova-operator-controller-manager-57d6b69d8b-s8kvs" podUID="3ad17a28-2cb1-4455-8d38-58919a287ae6" containerName="manager" containerID="cri-o://e67cc281b7eb394ed1cf80319838ad97dd8c88447cab1e5fbeb55f20bbbe9488" gracePeriod=10 Jan 27 08:11:27 crc kubenswrapper[4787]: I0127 08:11:27.813958 4787 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/nova-operator-index-w98b8"] Jan 27 08:11:27 crc kubenswrapper[4787]: I0127 08:11:27.855318 4787 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/nova-operator-controller-manager-57d6b69d8b-j9nxt"] Jan 27 08:11:27 crc kubenswrapper[4787]: I0127 08:11:27.879559 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/nova-operator-index-w98b8" Jan 27 08:11:27 crc kubenswrapper[4787]: I0127 08:11:27.879934 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/nova-operator-controller-manager-57d6b69d8b-j9nxt" Jan 27 08:11:27 crc kubenswrapper[4787]: I0127 08:11:27.888663 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/nova-operator-index-w98b8"] Jan 27 08:11:27 crc kubenswrapper[4787]: I0127 08:11:27.914529 4787 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/nova-operator-controller-manager-57d6b69d8b-s8kvs" Jan 27 08:11:27 crc kubenswrapper[4787]: I0127 08:11:27.926006 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"nova-operator-index-dockercfg-6ndhv" Jan 27 08:11:27 crc kubenswrapper[4787]: I0127 08:11:27.942330 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j6bn4\" (UniqueName: \"kubernetes.io/projected/d837af28-1f85-4995-9299-df5a288df488-kube-api-access-j6bn4\") pod \"nova-operator-index-w98b8\" (UID: \"d837af28-1f85-4995-9299-df5a288df488\") " pod="openstack-operators/nova-operator-index-w98b8" Jan 27 08:11:27 crc kubenswrapper[4787]: I0127 08:11:27.942459 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bkxsb\" (UniqueName: \"kubernetes.io/projected/b0a80c53-be1f-444d-a49a-05c89daad297-kube-api-access-bkxsb\") pod \"nova-operator-controller-manager-57d6b69d8b-j9nxt\" (UID: \"b0a80c53-be1f-444d-a49a-05c89daad297\") " pod="openstack-operators/nova-operator-controller-manager-57d6b69d8b-j9nxt" Jan 27 08:11:28 crc kubenswrapper[4787]: I0127 08:11:28.043591 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2vdjb\" (UniqueName: \"kubernetes.io/projected/3ad17a28-2cb1-4455-8d38-58919a287ae6-kube-api-access-2vdjb\") pod \"3ad17a28-2cb1-4455-8d38-58919a287ae6\" (UID: \"3ad17a28-2cb1-4455-8d38-58919a287ae6\") " Jan 27 08:11:28 crc kubenswrapper[4787]: I0127 08:11:28.044706 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bkxsb\" (UniqueName: \"kubernetes.io/projected/b0a80c53-be1f-444d-a49a-05c89daad297-kube-api-access-bkxsb\") pod \"nova-operator-controller-manager-57d6b69d8b-j9nxt\" (UID: \"b0a80c53-be1f-444d-a49a-05c89daad297\") " pod="openstack-operators/nova-operator-controller-manager-57d6b69d8b-j9nxt" Jan 27 08:11:28 crc kubenswrapper[4787]: I0127 08:11:28.044767 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j6bn4\" (UniqueName: \"kubernetes.io/projected/d837af28-1f85-4995-9299-df5a288df488-kube-api-access-j6bn4\") pod \"nova-operator-index-w98b8\" (UID: \"d837af28-1f85-4995-9299-df5a288df488\") " pod="openstack-operators/nova-operator-index-w98b8" Jan 27 08:11:28 crc kubenswrapper[4787]: I0127 08:11:28.053191 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3ad17a28-2cb1-4455-8d38-58919a287ae6-kube-api-access-2vdjb" (OuterVolumeSpecName: "kube-api-access-2vdjb") pod "3ad17a28-2cb1-4455-8d38-58919a287ae6" (UID: "3ad17a28-2cb1-4455-8d38-58919a287ae6"). InnerVolumeSpecName "kube-api-access-2vdjb". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 27 08:11:28 crc kubenswrapper[4787]: I0127 08:11:28.077721 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bkxsb\" (UniqueName: \"kubernetes.io/projected/b0a80c53-be1f-444d-a49a-05c89daad297-kube-api-access-bkxsb\") pod \"nova-operator-controller-manager-57d6b69d8b-j9nxt\" (UID: \"b0a80c53-be1f-444d-a49a-05c89daad297\") " pod="openstack-operators/nova-operator-controller-manager-57d6b69d8b-j9nxt" Jan 27 08:11:28 crc kubenswrapper[4787]: I0127 08:11:28.109814 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j6bn4\" (UniqueName: \"kubernetes.io/projected/d837af28-1f85-4995-9299-df5a288df488-kube-api-access-j6bn4\") pod \"nova-operator-index-w98b8\" (UID: \"d837af28-1f85-4995-9299-df5a288df488\") " pod="openstack-operators/nova-operator-index-w98b8" Jan 27 08:11:28 crc kubenswrapper[4787]: I0127 08:11:28.146565 4787 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2vdjb\" (UniqueName: \"kubernetes.io/projected/3ad17a28-2cb1-4455-8d38-58919a287ae6-kube-api-access-2vdjb\") on node \"crc\" DevicePath \"\"" Jan 27 08:11:28 crc kubenswrapper[4787]: I0127 08:11:28.223710 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/nova-operator-controller-manager-57d6b69d8b-j9nxt"] Jan 27 08:11:28 crc kubenswrapper[4787]: I0127 08:11:28.236031 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/nova-operator-index-w98b8" Jan 27 08:11:28 crc kubenswrapper[4787]: I0127 08:11:28.244282 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/nova-operator-controller-manager-57d6b69d8b-j9nxt" Jan 27 08:11:28 crc kubenswrapper[4787]: I0127 08:11:28.280572 4787 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/openstack-operator-controller-init-59cc4f5964-ch4bq"] Jan 27 08:11:28 crc kubenswrapper[4787]: I0127 08:11:28.281771 4787 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/openstack-operator-controller-init-59cc4f5964-ch4bq" podUID="86e4bb51-0d54-49f8-b158-8338d845d92e" containerName="operator" containerID="cri-o://5cbc51ae998c8ec5e3d568f273050ca82bb17cea0dd97580f6bd144d80fe9b5f" gracePeriod=10 Jan 27 08:11:28 crc kubenswrapper[4787]: I0127 08:11:28.354899 4787 generic.go:334] "Generic (PLEG): container finished" podID="3ad17a28-2cb1-4455-8d38-58919a287ae6" containerID="e67cc281b7eb394ed1cf80319838ad97dd8c88447cab1e5fbeb55f20bbbe9488" exitCode=0 Jan 27 08:11:28 crc kubenswrapper[4787]: I0127 08:11:28.354959 4787 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/nova-operator-controller-manager-57d6b69d8b-s8kvs" Jan 27 08:11:28 crc kubenswrapper[4787]: I0127 08:11:28.354956 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-57d6b69d8b-s8kvs" event={"ID":"3ad17a28-2cb1-4455-8d38-58919a287ae6","Type":"ContainerDied","Data":"e67cc281b7eb394ed1cf80319838ad97dd8c88447cab1e5fbeb55f20bbbe9488"} Jan 27 08:11:28 crc kubenswrapper[4787]: I0127 08:11:28.356219 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-57d6b69d8b-s8kvs" event={"ID":"3ad17a28-2cb1-4455-8d38-58919a287ae6","Type":"ContainerDied","Data":"2b4b23f00d60ad642771edd29b4d11331ba1889dbee6abb9a2f10d40a9233501"} Jan 27 08:11:28 crc kubenswrapper[4787]: I0127 08:11:28.356261 4787 scope.go:117] "RemoveContainer" containerID="e67cc281b7eb394ed1cf80319838ad97dd8c88447cab1e5fbeb55f20bbbe9488" Jan 27 08:11:28 crc kubenswrapper[4787]: I0127 08:11:28.435703 4787 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/nova-operator-controller-manager-57d6b69d8b-s8kvs"] Jan 27 08:11:28 crc kubenswrapper[4787]: I0127 08:11:28.440752 4787 scope.go:117] "RemoveContainer" containerID="e67cc281b7eb394ed1cf80319838ad97dd8c88447cab1e5fbeb55f20bbbe9488" Jan 27 08:11:28 crc kubenswrapper[4787]: E0127 08:11:28.443728 4787 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e67cc281b7eb394ed1cf80319838ad97dd8c88447cab1e5fbeb55f20bbbe9488\": container with ID starting with e67cc281b7eb394ed1cf80319838ad97dd8c88447cab1e5fbeb55f20bbbe9488 not found: ID does not exist" containerID="e67cc281b7eb394ed1cf80319838ad97dd8c88447cab1e5fbeb55f20bbbe9488" Jan 27 08:11:28 crc kubenswrapper[4787]: I0127 08:11:28.443797 4787 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e67cc281b7eb394ed1cf80319838ad97dd8c88447cab1e5fbeb55f20bbbe9488"} err="failed to get container status \"e67cc281b7eb394ed1cf80319838ad97dd8c88447cab1e5fbeb55f20bbbe9488\": rpc error: code = NotFound desc = could not find container \"e67cc281b7eb394ed1cf80319838ad97dd8c88447cab1e5fbeb55f20bbbe9488\": container with ID starting with e67cc281b7eb394ed1cf80319838ad97dd8c88447cab1e5fbeb55f20bbbe9488 not found: ID does not exist" Jan 27 08:11:28 crc kubenswrapper[4787]: I0127 08:11:28.451841 4787 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/nova-operator-controller-manager-57d6b69d8b-s8kvs"] Jan 27 08:11:28 crc kubenswrapper[4787]: I0127 08:11:28.821918 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/nova-operator-controller-manager-57d6b69d8b-j9nxt"] Jan 27 08:11:28 crc kubenswrapper[4787]: I0127 08:11:28.967427 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/nova-operator-index-w98b8"] Jan 27 08:11:29 crc kubenswrapper[4787]: W0127 08:11:29.007783 4787 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd837af28_1f85_4995_9299_df5a288df488.slice/crio-a503e0936eba5a61fa705651473098014686ae86a0e2f0c7e561df05e7f9307f WatchSource:0}: Error finding container a503e0936eba5a61fa705651473098014686ae86a0e2f0c7e561df05e7f9307f: Status 404 returned error can't find the container with id a503e0936eba5a61fa705651473098014686ae86a0e2f0c7e561df05e7f9307f Jan 27 08:11:29 crc kubenswrapper[4787]: I0127 
08:11:29.040655 4787 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-controller-init-59cc4f5964-ch4bq" Jan 27 08:11:29 crc kubenswrapper[4787]: I0127 08:11:29.058974 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lftt5\" (UniqueName: \"kubernetes.io/projected/86e4bb51-0d54-49f8-b158-8338d845d92e-kube-api-access-lftt5\") pod \"86e4bb51-0d54-49f8-b158-8338d845d92e\" (UID: \"86e4bb51-0d54-49f8-b158-8338d845d92e\") " Jan 27 08:11:29 crc kubenswrapper[4787]: I0127 08:11:29.065830 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/86e4bb51-0d54-49f8-b158-8338d845d92e-kube-api-access-lftt5" (OuterVolumeSpecName: "kube-api-access-lftt5") pod "86e4bb51-0d54-49f8-b158-8338d845d92e" (UID: "86e4bb51-0d54-49f8-b158-8338d845d92e"). InnerVolumeSpecName "kube-api-access-lftt5". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 27 08:11:29 crc kubenswrapper[4787]: I0127 08:11:29.088863 4787 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3ad17a28-2cb1-4455-8d38-58919a287ae6" path="/var/lib/kubelet/pods/3ad17a28-2cb1-4455-8d38-58919a287ae6/volumes" Jan 27 08:11:29 crc kubenswrapper[4787]: I0127 08:11:29.160227 4787 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lftt5\" (UniqueName: \"kubernetes.io/projected/86e4bb51-0d54-49f8-b158-8338d845d92e-kube-api-access-lftt5\") on node \"crc\" DevicePath \"\"" Jan 27 08:11:29 crc kubenswrapper[4787]: I0127 08:11:29.364468 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-index-w98b8" event={"ID":"d837af28-1f85-4995-9299-df5a288df488","Type":"ContainerStarted","Data":"a503e0936eba5a61fa705651473098014686ae86a0e2f0c7e561df05e7f9307f"} Jan 27 08:11:29 crc kubenswrapper[4787]: I0127 08:11:29.367175 4787 generic.go:334] "Generic (PLEG): container finished" podID="86e4bb51-0d54-49f8-b158-8338d845d92e" containerID="5cbc51ae998c8ec5e3d568f273050ca82bb17cea0dd97580f6bd144d80fe9b5f" exitCode=0 Jan 27 08:11:29 crc kubenswrapper[4787]: I0127 08:11:29.367242 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-init-59cc4f5964-ch4bq" event={"ID":"86e4bb51-0d54-49f8-b158-8338d845d92e","Type":"ContainerDied","Data":"5cbc51ae998c8ec5e3d568f273050ca82bb17cea0dd97580f6bd144d80fe9b5f"} Jan 27 08:11:29 crc kubenswrapper[4787]: I0127 08:11:29.367247 4787 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-controller-init-59cc4f5964-ch4bq" Jan 27 08:11:29 crc kubenswrapper[4787]: I0127 08:11:29.367269 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-init-59cc4f5964-ch4bq" event={"ID":"86e4bb51-0d54-49f8-b158-8338d845d92e","Type":"ContainerDied","Data":"0a86cb2179439de11cc5dfa2c7d576a7b85ad83b8b80b1dd922ae9e6f93584cf"} Jan 27 08:11:29 crc kubenswrapper[4787]: I0127 08:11:29.367292 4787 scope.go:117] "RemoveContainer" containerID="5cbc51ae998c8ec5e3d568f273050ca82bb17cea0dd97580f6bd144d80fe9b5f" Jan 27 08:11:29 crc kubenswrapper[4787]: I0127 08:11:29.369592 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-57d6b69d8b-j9nxt" event={"ID":"b0a80c53-be1f-444d-a49a-05c89daad297","Type":"ContainerStarted","Data":"48b79785028da806daee7f9f9c684f5e147f8b578e7cccc7511b149d38aa6cce"} Jan 27 08:11:29 crc kubenswrapper[4787]: I0127 08:11:29.369621 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-57d6b69d8b-j9nxt" event={"ID":"b0a80c53-be1f-444d-a49a-05c89daad297","Type":"ContainerStarted","Data":"cf3c11c74cb52d6c2694795eb36955d9b2e7cc2dd460a9793ab3f571101c7cfb"} Jan 27 08:11:29 crc kubenswrapper[4787]: I0127 08:11:29.370297 4787 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/nova-operator-controller-manager-57d6b69d8b-j9nxt" Jan 27 08:11:29 crc kubenswrapper[4787]: I0127 08:11:29.391974 4787 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/nova-operator-controller-manager-57d6b69d8b-j9nxt" podStartSLOduration=2.391951648 podStartE2EDuration="2.391951648s" podCreationTimestamp="2026-01-27 08:11:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-27 08:11:29.3863365 +0000 UTC m=+1195.038691992" watchObservedRunningTime="2026-01-27 08:11:29.391951648 +0000 UTC m=+1195.044307140" Jan 27 08:11:29 crc kubenswrapper[4787]: I0127 08:11:29.417614 4787 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/openstack-operator-controller-init-59cc4f5964-ch4bq"] Jan 27 08:11:29 crc kubenswrapper[4787]: I0127 08:11:29.423258 4787 scope.go:117] "RemoveContainer" containerID="5cbc51ae998c8ec5e3d568f273050ca82bb17cea0dd97580f6bd144d80fe9b5f" Jan 27 08:11:29 crc kubenswrapper[4787]: E0127 08:11:29.423651 4787 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5cbc51ae998c8ec5e3d568f273050ca82bb17cea0dd97580f6bd144d80fe9b5f\": container with ID starting with 5cbc51ae998c8ec5e3d568f273050ca82bb17cea0dd97580f6bd144d80fe9b5f not found: ID does not exist" containerID="5cbc51ae998c8ec5e3d568f273050ca82bb17cea0dd97580f6bd144d80fe9b5f" Jan 27 08:11:29 crc kubenswrapper[4787]: I0127 08:11:29.423699 4787 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5cbc51ae998c8ec5e3d568f273050ca82bb17cea0dd97580f6bd144d80fe9b5f"} err="failed to get container status \"5cbc51ae998c8ec5e3d568f273050ca82bb17cea0dd97580f6bd144d80fe9b5f\": rpc error: code = NotFound desc = could not find container \"5cbc51ae998c8ec5e3d568f273050ca82bb17cea0dd97580f6bd144d80fe9b5f\": container with ID starting with 5cbc51ae998c8ec5e3d568f273050ca82bb17cea0dd97580f6bd144d80fe9b5f not found: ID does not 
exist" Jan 27 08:11:29 crc kubenswrapper[4787]: I0127 08:11:29.423853 4787 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/openstack-operator-controller-init-59cc4f5964-ch4bq"] Jan 27 08:11:30 crc kubenswrapper[4787]: I0127 08:11:30.378520 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-index-w98b8" event={"ID":"d837af28-1f85-4995-9299-df5a288df488","Type":"ContainerStarted","Data":"d691ee43acf65ce1a09de83c30383fe2384fd18e046abf9eb4dedd4552b18cbf"} Jan 27 08:11:30 crc kubenswrapper[4787]: I0127 08:11:30.407470 4787 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/nova-operator-index-w98b8" podStartSLOduration=2.730769323 podStartE2EDuration="3.407438157s" podCreationTimestamp="2026-01-27 08:11:27 +0000 UTC" firstStartedPulling="2026-01-27 08:11:29.013682619 +0000 UTC m=+1194.666038101" lastFinishedPulling="2026-01-27 08:11:29.690351433 +0000 UTC m=+1195.342706935" observedRunningTime="2026-01-27 08:11:30.398868585 +0000 UTC m=+1196.051224097" watchObservedRunningTime="2026-01-27 08:11:30.407438157 +0000 UTC m=+1196.059793649" Jan 27 08:11:30 crc kubenswrapper[4787]: I0127 08:11:30.499349 4787 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/nova-operator-index-w98b8"] Jan 27 08:11:30 crc kubenswrapper[4787]: I0127 08:11:30.908977 4787 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/nova-operator-index-2jn9v"] Jan 27 08:11:30 crc kubenswrapper[4787]: E0127 08:11:30.909482 4787 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3ad17a28-2cb1-4455-8d38-58919a287ae6" containerName="manager" Jan 27 08:11:30 crc kubenswrapper[4787]: I0127 08:11:30.909506 4787 state_mem.go:107] "Deleted CPUSet assignment" podUID="3ad17a28-2cb1-4455-8d38-58919a287ae6" containerName="manager" Jan 27 08:11:30 crc kubenswrapper[4787]: E0127 08:11:30.909597 4787 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="86e4bb51-0d54-49f8-b158-8338d845d92e" containerName="operator" Jan 27 08:11:30 crc kubenswrapper[4787]: I0127 08:11:30.909606 4787 state_mem.go:107] "Deleted CPUSet assignment" podUID="86e4bb51-0d54-49f8-b158-8338d845d92e" containerName="operator" Jan 27 08:11:30 crc kubenswrapper[4787]: I0127 08:11:30.909814 4787 memory_manager.go:354] "RemoveStaleState removing state" podUID="86e4bb51-0d54-49f8-b158-8338d845d92e" containerName="operator" Jan 27 08:11:30 crc kubenswrapper[4787]: I0127 08:11:30.909832 4787 memory_manager.go:354] "RemoveStaleState removing state" podUID="3ad17a28-2cb1-4455-8d38-58919a287ae6" containerName="manager" Jan 27 08:11:30 crc kubenswrapper[4787]: I0127 08:11:30.910748 4787 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/nova-operator-index-2jn9v" Jan 27 08:11:30 crc kubenswrapper[4787]: I0127 08:11:30.926335 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/nova-operator-index-2jn9v"] Jan 27 08:11:31 crc kubenswrapper[4787]: I0127 08:11:31.089153 4787 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="86e4bb51-0d54-49f8-b158-8338d845d92e" path="/var/lib/kubelet/pods/86e4bb51-0d54-49f8-b158-8338d845d92e/volumes" Jan 27 08:11:31 crc kubenswrapper[4787]: I0127 08:11:31.094424 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tgvxp\" (UniqueName: \"kubernetes.io/projected/804d50f1-7da5-4d0c-8c0e-cac4c5e5a244-kube-api-access-tgvxp\") pod \"nova-operator-index-2jn9v\" (UID: \"804d50f1-7da5-4d0c-8c0e-cac4c5e5a244\") " pod="openstack-operators/nova-operator-index-2jn9v" Jan 27 08:11:31 crc kubenswrapper[4787]: I0127 08:11:31.196124 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tgvxp\" (UniqueName: \"kubernetes.io/projected/804d50f1-7da5-4d0c-8c0e-cac4c5e5a244-kube-api-access-tgvxp\") pod \"nova-operator-index-2jn9v\" (UID: \"804d50f1-7da5-4d0c-8c0e-cac4c5e5a244\") " pod="openstack-operators/nova-operator-index-2jn9v" Jan 27 08:11:31 crc kubenswrapper[4787]: I0127 08:11:31.232763 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tgvxp\" (UniqueName: \"kubernetes.io/projected/804d50f1-7da5-4d0c-8c0e-cac4c5e5a244-kube-api-access-tgvxp\") pod \"nova-operator-index-2jn9v\" (UID: \"804d50f1-7da5-4d0c-8c0e-cac4c5e5a244\") " pod="openstack-operators/nova-operator-index-2jn9v" Jan 27 08:11:31 crc kubenswrapper[4787]: I0127 08:11:31.303039 4787 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/nova-operator-index-2jn9v" Jan 27 08:11:31 crc kubenswrapper[4787]: I0127 08:11:31.801360 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/nova-operator-index-2jn9v"] Jan 27 08:11:32 crc kubenswrapper[4787]: I0127 08:11:32.401829 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-index-2jn9v" event={"ID":"804d50f1-7da5-4d0c-8c0e-cac4c5e5a244","Type":"ContainerStarted","Data":"fe27269b0a3babaa627cfac432133034ec795f2df0a87c61ed76f07a1949f31d"} Jan 27 08:11:32 crc kubenswrapper[4787]: I0127 08:11:32.402303 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-index-2jn9v" event={"ID":"804d50f1-7da5-4d0c-8c0e-cac4c5e5a244","Type":"ContainerStarted","Data":"a1549453f2fb0c0766b27c9809d64ccb6d4fa4219b47e921cc333e5c584bad78"} Jan 27 08:11:32 crc kubenswrapper[4787]: I0127 08:11:32.401951 4787 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/nova-operator-index-w98b8" podUID="d837af28-1f85-4995-9299-df5a288df488" containerName="registry-server" containerID="cri-o://d691ee43acf65ce1a09de83c30383fe2384fd18e046abf9eb4dedd4552b18cbf" gracePeriod=2 Jan 27 08:11:32 crc kubenswrapper[4787]: I0127 08:11:32.431379 4787 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/nova-operator-index-2jn9v" podStartSLOduration=2.377057616 podStartE2EDuration="2.431358239s" podCreationTimestamp="2026-01-27 08:11:30 +0000 UTC" firstStartedPulling="2026-01-27 08:11:31.816523473 +0000 UTC m=+1197.468878965" lastFinishedPulling="2026-01-27 08:11:31.870824096 +0000 UTC m=+1197.523179588" observedRunningTime="2026-01-27 08:11:32.422595173 +0000 UTC m=+1198.074950685" watchObservedRunningTime="2026-01-27 08:11:32.431358239 +0000 UTC m=+1198.083713731" Jan 27 08:11:32 crc kubenswrapper[4787]: I0127 08:11:32.879189 4787 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/nova-operator-index-w98b8" Jan 27 08:11:33 crc kubenswrapper[4787]: I0127 08:11:33.034044 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-j6bn4\" (UniqueName: \"kubernetes.io/projected/d837af28-1f85-4995-9299-df5a288df488-kube-api-access-j6bn4\") pod \"d837af28-1f85-4995-9299-df5a288df488\" (UID: \"d837af28-1f85-4995-9299-df5a288df488\") " Jan 27 08:11:33 crc kubenswrapper[4787]: I0127 08:11:33.040727 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d837af28-1f85-4995-9299-df5a288df488-kube-api-access-j6bn4" (OuterVolumeSpecName: "kube-api-access-j6bn4") pod "d837af28-1f85-4995-9299-df5a288df488" (UID: "d837af28-1f85-4995-9299-df5a288df488"). InnerVolumeSpecName "kube-api-access-j6bn4". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 27 08:11:33 crc kubenswrapper[4787]: I0127 08:11:33.136508 4787 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-j6bn4\" (UniqueName: \"kubernetes.io/projected/d837af28-1f85-4995-9299-df5a288df488-kube-api-access-j6bn4\") on node \"crc\" DevicePath \"\"" Jan 27 08:11:33 crc kubenswrapper[4787]: I0127 08:11:33.413475 4787 generic.go:334] "Generic (PLEG): container finished" podID="d837af28-1f85-4995-9299-df5a288df488" containerID="d691ee43acf65ce1a09de83c30383fe2384fd18e046abf9eb4dedd4552b18cbf" exitCode=0 Jan 27 08:11:33 crc kubenswrapper[4787]: I0127 08:11:33.413512 4787 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/nova-operator-index-w98b8" Jan 27 08:11:33 crc kubenswrapper[4787]: I0127 08:11:33.413529 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-index-w98b8" event={"ID":"d837af28-1f85-4995-9299-df5a288df488","Type":"ContainerDied","Data":"d691ee43acf65ce1a09de83c30383fe2384fd18e046abf9eb4dedd4552b18cbf"} Jan 27 08:11:33 crc kubenswrapper[4787]: I0127 08:11:33.414098 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-index-w98b8" event={"ID":"d837af28-1f85-4995-9299-df5a288df488","Type":"ContainerDied","Data":"a503e0936eba5a61fa705651473098014686ae86a0e2f0c7e561df05e7f9307f"} Jan 27 08:11:33 crc kubenswrapper[4787]: I0127 08:11:33.414129 4787 scope.go:117] "RemoveContainer" containerID="d691ee43acf65ce1a09de83c30383fe2384fd18e046abf9eb4dedd4552b18cbf" Jan 27 08:11:33 crc kubenswrapper[4787]: I0127 08:11:33.442249 4787 scope.go:117] "RemoveContainer" containerID="d691ee43acf65ce1a09de83c30383fe2384fd18e046abf9eb4dedd4552b18cbf" Jan 27 08:11:33 crc kubenswrapper[4787]: E0127 08:11:33.442818 4787 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d691ee43acf65ce1a09de83c30383fe2384fd18e046abf9eb4dedd4552b18cbf\": container with ID starting with d691ee43acf65ce1a09de83c30383fe2384fd18e046abf9eb4dedd4552b18cbf not found: ID does not exist" containerID="d691ee43acf65ce1a09de83c30383fe2384fd18e046abf9eb4dedd4552b18cbf" Jan 27 08:11:33 crc kubenswrapper[4787]: I0127 08:11:33.442864 4787 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d691ee43acf65ce1a09de83c30383fe2384fd18e046abf9eb4dedd4552b18cbf"} err="failed to get container status \"d691ee43acf65ce1a09de83c30383fe2384fd18e046abf9eb4dedd4552b18cbf\": rpc error: code = NotFound desc = could not find container \"d691ee43acf65ce1a09de83c30383fe2384fd18e046abf9eb4dedd4552b18cbf\": container with ID starting with d691ee43acf65ce1a09de83c30383fe2384fd18e046abf9eb4dedd4552b18cbf not found: ID does not exist" Jan 27 08:11:33 crc kubenswrapper[4787]: I0127 08:11:33.456058 4787 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/nova-operator-index-w98b8"] Jan 27 08:11:33 crc kubenswrapper[4787]: I0127 08:11:33.465726 4787 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/nova-operator-index-w98b8"] Jan 27 08:11:35 crc kubenswrapper[4787]: I0127 08:11:35.090152 4787 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d837af28-1f85-4995-9299-df5a288df488" path="/var/lib/kubelet/pods/d837af28-1f85-4995-9299-df5a288df488/volumes" Jan 27 08:11:38 crc kubenswrapper[4787]: I0127 08:11:38.247407 4787 kubelet.go:2542] "SyncLoop (probe)" 
probe="readiness" status="ready" pod="openstack-operators/nova-operator-controller-manager-57d6b69d8b-j9nxt" Jan 27 08:11:41 crc kubenswrapper[4787]: I0127 08:11:41.303962 4787 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-operators/nova-operator-index-2jn9v" Jan 27 08:11:41 crc kubenswrapper[4787]: I0127 08:11:41.304024 4787 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/nova-operator-index-2jn9v" Jan 27 08:11:41 crc kubenswrapper[4787]: I0127 08:11:41.343029 4787 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-operators/nova-operator-index-2jn9v" Jan 27 08:11:41 crc kubenswrapper[4787]: I0127 08:11:41.504399 4787 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/nova-operator-index-2jn9v" Jan 27 08:11:52 crc kubenswrapper[4787]: I0127 08:11:52.823602 4787 patch_prober.go:28] interesting pod/machine-config-daemon-q4fh5 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 27 08:11:52 crc kubenswrapper[4787]: I0127 08:11:52.824407 4787 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-q4fh5" podUID="f051e184-acac-47cf-9e04-9df648288715" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 27 08:11:54 crc kubenswrapper[4787]: I0127 08:11:54.952999 4787 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/666ed4883fb88ff51f70cbd0ee285bf2a10ae3509c94c96f2548a58c3bzxmp2"] Jan 27 08:11:54 crc kubenswrapper[4787]: E0127 08:11:54.953815 4787 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d837af28-1f85-4995-9299-df5a288df488" containerName="registry-server" Jan 27 08:11:54 crc kubenswrapper[4787]: I0127 08:11:54.953830 4787 state_mem.go:107] "Deleted CPUSet assignment" podUID="d837af28-1f85-4995-9299-df5a288df488" containerName="registry-server" Jan 27 08:11:54 crc kubenswrapper[4787]: I0127 08:11:54.953984 4787 memory_manager.go:354] "RemoveStaleState removing state" podUID="d837af28-1f85-4995-9299-df5a288df488" containerName="registry-server" Jan 27 08:11:54 crc kubenswrapper[4787]: I0127 08:11:54.955082 4787 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/666ed4883fb88ff51f70cbd0ee285bf2a10ae3509c94c96f2548a58c3bzxmp2" Jan 27 08:11:54 crc kubenswrapper[4787]: I0127 08:11:54.962983 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/666ed4883fb88ff51f70cbd0ee285bf2a10ae3509c94c96f2548a58c3bzxmp2"] Jan 27 08:11:54 crc kubenswrapper[4787]: I0127 08:11:54.965461 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"default-dockercfg-7z9mk" Jan 27 08:11:55 crc kubenswrapper[4787]: I0127 08:11:55.057296 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sftgl\" (UniqueName: \"kubernetes.io/projected/46a8c934-ec5d-48f9-8c39-f4e4c730d7a5-kube-api-access-sftgl\") pod \"666ed4883fb88ff51f70cbd0ee285bf2a10ae3509c94c96f2548a58c3bzxmp2\" (UID: \"46a8c934-ec5d-48f9-8c39-f4e4c730d7a5\") " pod="openstack-operators/666ed4883fb88ff51f70cbd0ee285bf2a10ae3509c94c96f2548a58c3bzxmp2" Jan 27 08:11:55 crc kubenswrapper[4787]: I0127 08:11:55.057405 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/46a8c934-ec5d-48f9-8c39-f4e4c730d7a5-util\") pod \"666ed4883fb88ff51f70cbd0ee285bf2a10ae3509c94c96f2548a58c3bzxmp2\" (UID: \"46a8c934-ec5d-48f9-8c39-f4e4c730d7a5\") " pod="openstack-operators/666ed4883fb88ff51f70cbd0ee285bf2a10ae3509c94c96f2548a58c3bzxmp2" Jan 27 08:11:55 crc kubenswrapper[4787]: I0127 08:11:55.057471 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/46a8c934-ec5d-48f9-8c39-f4e4c730d7a5-bundle\") pod \"666ed4883fb88ff51f70cbd0ee285bf2a10ae3509c94c96f2548a58c3bzxmp2\" (UID: \"46a8c934-ec5d-48f9-8c39-f4e4c730d7a5\") " pod="openstack-operators/666ed4883fb88ff51f70cbd0ee285bf2a10ae3509c94c96f2548a58c3bzxmp2" Jan 27 08:11:55 crc kubenswrapper[4787]: I0127 08:11:55.159506 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/46a8c934-ec5d-48f9-8c39-f4e4c730d7a5-bundle\") pod \"666ed4883fb88ff51f70cbd0ee285bf2a10ae3509c94c96f2548a58c3bzxmp2\" (UID: \"46a8c934-ec5d-48f9-8c39-f4e4c730d7a5\") " pod="openstack-operators/666ed4883fb88ff51f70cbd0ee285bf2a10ae3509c94c96f2548a58c3bzxmp2" Jan 27 08:11:55 crc kubenswrapper[4787]: I0127 08:11:55.159930 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sftgl\" (UniqueName: \"kubernetes.io/projected/46a8c934-ec5d-48f9-8c39-f4e4c730d7a5-kube-api-access-sftgl\") pod \"666ed4883fb88ff51f70cbd0ee285bf2a10ae3509c94c96f2548a58c3bzxmp2\" (UID: \"46a8c934-ec5d-48f9-8c39-f4e4c730d7a5\") " pod="openstack-operators/666ed4883fb88ff51f70cbd0ee285bf2a10ae3509c94c96f2548a58c3bzxmp2" Jan 27 08:11:55 crc kubenswrapper[4787]: I0127 08:11:55.160158 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/46a8c934-ec5d-48f9-8c39-f4e4c730d7a5-bundle\") pod \"666ed4883fb88ff51f70cbd0ee285bf2a10ae3509c94c96f2548a58c3bzxmp2\" (UID: \"46a8c934-ec5d-48f9-8c39-f4e4c730d7a5\") " pod="openstack-operators/666ed4883fb88ff51f70cbd0ee285bf2a10ae3509c94c96f2548a58c3bzxmp2" Jan 27 08:11:55 crc kubenswrapper[4787]: I0127 08:11:55.160367 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: 
\"kubernetes.io/empty-dir/46a8c934-ec5d-48f9-8c39-f4e4c730d7a5-util\") pod \"666ed4883fb88ff51f70cbd0ee285bf2a10ae3509c94c96f2548a58c3bzxmp2\" (UID: \"46a8c934-ec5d-48f9-8c39-f4e4c730d7a5\") " pod="openstack-operators/666ed4883fb88ff51f70cbd0ee285bf2a10ae3509c94c96f2548a58c3bzxmp2" Jan 27 08:11:55 crc kubenswrapper[4787]: I0127 08:11:55.161018 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/46a8c934-ec5d-48f9-8c39-f4e4c730d7a5-util\") pod \"666ed4883fb88ff51f70cbd0ee285bf2a10ae3509c94c96f2548a58c3bzxmp2\" (UID: \"46a8c934-ec5d-48f9-8c39-f4e4c730d7a5\") " pod="openstack-operators/666ed4883fb88ff51f70cbd0ee285bf2a10ae3509c94c96f2548a58c3bzxmp2" Jan 27 08:11:55 crc kubenswrapper[4787]: I0127 08:11:55.181369 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sftgl\" (UniqueName: \"kubernetes.io/projected/46a8c934-ec5d-48f9-8c39-f4e4c730d7a5-kube-api-access-sftgl\") pod \"666ed4883fb88ff51f70cbd0ee285bf2a10ae3509c94c96f2548a58c3bzxmp2\" (UID: \"46a8c934-ec5d-48f9-8c39-f4e4c730d7a5\") " pod="openstack-operators/666ed4883fb88ff51f70cbd0ee285bf2a10ae3509c94c96f2548a58c3bzxmp2" Jan 27 08:11:55 crc kubenswrapper[4787]: I0127 08:11:55.278922 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/666ed4883fb88ff51f70cbd0ee285bf2a10ae3509c94c96f2548a58c3bzxmp2" Jan 27 08:11:55 crc kubenswrapper[4787]: I0127 08:11:55.789369 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/666ed4883fb88ff51f70cbd0ee285bf2a10ae3509c94c96f2548a58c3bzxmp2"] Jan 27 08:11:56 crc kubenswrapper[4787]: I0127 08:11:56.601392 4787 generic.go:334] "Generic (PLEG): container finished" podID="46a8c934-ec5d-48f9-8c39-f4e4c730d7a5" containerID="22424c7a10f080df1cdbb0e1dd7570d87671d5e19608eda8e3504e4abb4919a6" exitCode=0 Jan 27 08:11:56 crc kubenswrapper[4787]: I0127 08:11:56.601487 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/666ed4883fb88ff51f70cbd0ee285bf2a10ae3509c94c96f2548a58c3bzxmp2" event={"ID":"46a8c934-ec5d-48f9-8c39-f4e4c730d7a5","Type":"ContainerDied","Data":"22424c7a10f080df1cdbb0e1dd7570d87671d5e19608eda8e3504e4abb4919a6"} Jan 27 08:11:56 crc kubenswrapper[4787]: I0127 08:11:56.601715 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/666ed4883fb88ff51f70cbd0ee285bf2a10ae3509c94c96f2548a58c3bzxmp2" event={"ID":"46a8c934-ec5d-48f9-8c39-f4e4c730d7a5","Type":"ContainerStarted","Data":"63e398e677bb8f944eb1dfb72e783fa606c3fbf7e709c710ccf82db09ea132a2"} Jan 27 08:11:59 crc kubenswrapper[4787]: I0127 08:11:59.626446 4787 generic.go:334] "Generic (PLEG): container finished" podID="46a8c934-ec5d-48f9-8c39-f4e4c730d7a5" containerID="d2a7b14a2aa008c1cb7abb31f77cb88e86b92e22245e9dc2eebf5af98791cf56" exitCode=0 Jan 27 08:11:59 crc kubenswrapper[4787]: I0127 08:11:59.626624 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/666ed4883fb88ff51f70cbd0ee285bf2a10ae3509c94c96f2548a58c3bzxmp2" event={"ID":"46a8c934-ec5d-48f9-8c39-f4e4c730d7a5","Type":"ContainerDied","Data":"d2a7b14a2aa008c1cb7abb31f77cb88e86b92e22245e9dc2eebf5af98791cf56"} Jan 27 08:12:00 crc kubenswrapper[4787]: I0127 08:12:00.644319 4787 generic.go:334] "Generic (PLEG): container finished" podID="46a8c934-ec5d-48f9-8c39-f4e4c730d7a5" containerID="3aef8535db6c863eb9ad8cbb5b308d1f9d18b7b0e842a6871d7f37a64dfcaae1" exitCode=0 Jan 27 08:12:00 crc kubenswrapper[4787]: I0127 08:12:00.644499 4787 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/666ed4883fb88ff51f70cbd0ee285bf2a10ae3509c94c96f2548a58c3bzxmp2" event={"ID":"46a8c934-ec5d-48f9-8c39-f4e4c730d7a5","Type":"ContainerDied","Data":"3aef8535db6c863eb9ad8cbb5b308d1f9d18b7b0e842a6871d7f37a64dfcaae1"} Jan 27 08:12:02 crc kubenswrapper[4787]: I0127 08:12:02.030324 4787 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/666ed4883fb88ff51f70cbd0ee285bf2a10ae3509c94c96f2548a58c3bzxmp2" Jan 27 08:12:02 crc kubenswrapper[4787]: I0127 08:12:02.095662 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sftgl\" (UniqueName: \"kubernetes.io/projected/46a8c934-ec5d-48f9-8c39-f4e4c730d7a5-kube-api-access-sftgl\") pod \"46a8c934-ec5d-48f9-8c39-f4e4c730d7a5\" (UID: \"46a8c934-ec5d-48f9-8c39-f4e4c730d7a5\") " Jan 27 08:12:02 crc kubenswrapper[4787]: I0127 08:12:02.095826 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/46a8c934-ec5d-48f9-8c39-f4e4c730d7a5-bundle\") pod \"46a8c934-ec5d-48f9-8c39-f4e4c730d7a5\" (UID: \"46a8c934-ec5d-48f9-8c39-f4e4c730d7a5\") " Jan 27 08:12:02 crc kubenswrapper[4787]: I0127 08:12:02.095944 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/46a8c934-ec5d-48f9-8c39-f4e4c730d7a5-util\") pod \"46a8c934-ec5d-48f9-8c39-f4e4c730d7a5\" (UID: \"46a8c934-ec5d-48f9-8c39-f4e4c730d7a5\") " Jan 27 08:12:02 crc kubenswrapper[4787]: I0127 08:12:02.097599 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/46a8c934-ec5d-48f9-8c39-f4e4c730d7a5-bundle" (OuterVolumeSpecName: "bundle") pod "46a8c934-ec5d-48f9-8c39-f4e4c730d7a5" (UID: "46a8c934-ec5d-48f9-8c39-f4e4c730d7a5"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 27 08:12:02 crc kubenswrapper[4787]: I0127 08:12:02.103608 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/46a8c934-ec5d-48f9-8c39-f4e4c730d7a5-kube-api-access-sftgl" (OuterVolumeSpecName: "kube-api-access-sftgl") pod "46a8c934-ec5d-48f9-8c39-f4e4c730d7a5" (UID: "46a8c934-ec5d-48f9-8c39-f4e4c730d7a5"). InnerVolumeSpecName "kube-api-access-sftgl". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 27 08:12:02 crc kubenswrapper[4787]: I0127 08:12:02.109847 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/46a8c934-ec5d-48f9-8c39-f4e4c730d7a5-util" (OuterVolumeSpecName: "util") pod "46a8c934-ec5d-48f9-8c39-f4e4c730d7a5" (UID: "46a8c934-ec5d-48f9-8c39-f4e4c730d7a5"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 27 08:12:02 crc kubenswrapper[4787]: I0127 08:12:02.197681 4787 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/46a8c934-ec5d-48f9-8c39-f4e4c730d7a5-util\") on node \"crc\" DevicePath \"\"" Jan 27 08:12:02 crc kubenswrapper[4787]: I0127 08:12:02.198156 4787 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sftgl\" (UniqueName: \"kubernetes.io/projected/46a8c934-ec5d-48f9-8c39-f4e4c730d7a5-kube-api-access-sftgl\") on node \"crc\" DevicePath \"\"" Jan 27 08:12:02 crc kubenswrapper[4787]: I0127 08:12:02.198172 4787 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/46a8c934-ec5d-48f9-8c39-f4e4c730d7a5-bundle\") on node \"crc\" DevicePath \"\"" Jan 27 08:12:02 crc kubenswrapper[4787]: I0127 08:12:02.663160 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/666ed4883fb88ff51f70cbd0ee285bf2a10ae3509c94c96f2548a58c3bzxmp2" event={"ID":"46a8c934-ec5d-48f9-8c39-f4e4c730d7a5","Type":"ContainerDied","Data":"63e398e677bb8f944eb1dfb72e783fa606c3fbf7e709c710ccf82db09ea132a2"} Jan 27 08:12:02 crc kubenswrapper[4787]: I0127 08:12:02.663214 4787 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="63e398e677bb8f944eb1dfb72e783fa606c3fbf7e709c710ccf82db09ea132a2" Jan 27 08:12:02 crc kubenswrapper[4787]: I0127 08:12:02.663626 4787 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/666ed4883fb88ff51f70cbd0ee285bf2a10ae3509c94c96f2548a58c3bzxmp2" Jan 27 08:12:22 crc kubenswrapper[4787]: I0127 08:12:22.822957 4787 patch_prober.go:28] interesting pod/machine-config-daemon-q4fh5 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 27 08:12:22 crc kubenswrapper[4787]: I0127 08:12:22.823876 4787 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-q4fh5" podUID="f051e184-acac-47cf-9e04-9df648288715" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 27 08:12:52 crc kubenswrapper[4787]: I0127 08:12:52.822686 4787 patch_prober.go:28] interesting pod/machine-config-daemon-q4fh5 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 27 08:12:52 crc kubenswrapper[4787]: I0127 08:12:52.823585 4787 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-q4fh5" podUID="f051e184-acac-47cf-9e04-9df648288715" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 27 08:12:52 crc kubenswrapper[4787]: I0127 08:12:52.823651 4787 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-q4fh5" Jan 27 08:12:52 crc kubenswrapper[4787]: I0127 08:12:52.824537 4787 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" 
containerStatusID={"Type":"cri-o","ID":"62fa4f58004172098a708acf93c4ba2c2d75c5b2ad098327437e26bfa28ab448"} pod="openshift-machine-config-operator/machine-config-daemon-q4fh5" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 27 08:12:52 crc kubenswrapper[4787]: I0127 08:12:52.824632 4787 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-q4fh5" podUID="f051e184-acac-47cf-9e04-9df648288715" containerName="machine-config-daemon" containerID="cri-o://62fa4f58004172098a708acf93c4ba2c2d75c5b2ad098327437e26bfa28ab448" gracePeriod=600 Jan 27 08:12:53 crc kubenswrapper[4787]: I0127 08:12:53.075963 4787 generic.go:334] "Generic (PLEG): container finished" podID="f051e184-acac-47cf-9e04-9df648288715" containerID="62fa4f58004172098a708acf93c4ba2c2d75c5b2ad098327437e26bfa28ab448" exitCode=0 Jan 27 08:12:53 crc kubenswrapper[4787]: I0127 08:12:53.085237 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-q4fh5" event={"ID":"f051e184-acac-47cf-9e04-9df648288715","Type":"ContainerDied","Data":"62fa4f58004172098a708acf93c4ba2c2d75c5b2ad098327437e26bfa28ab448"} Jan 27 08:12:53 crc kubenswrapper[4787]: I0127 08:12:53.085285 4787 scope.go:117] "RemoveContainer" containerID="8407f8fd138ff9e300a7e8488e02cc30a66d50ed89a7c4ef1d3339c94e2a22b5" Jan 27 08:12:54 crc kubenswrapper[4787]: I0127 08:12:54.087236 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-q4fh5" event={"ID":"f051e184-acac-47cf-9e04-9df648288715","Type":"ContainerStarted","Data":"66c62556ef822841ab93c99b23fda5b0b3ea129b94ebcca6f4cff0b2d8c7f32e"} Jan 27 08:14:25 crc kubenswrapper[4787]: I0127 08:14:25.535303 4787 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-sdc6l"] Jan 27 08:14:25 crc kubenswrapper[4787]: E0127 08:14:25.536404 4787 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="46a8c934-ec5d-48f9-8c39-f4e4c730d7a5" containerName="util" Jan 27 08:14:25 crc kubenswrapper[4787]: I0127 08:14:25.536418 4787 state_mem.go:107] "Deleted CPUSet assignment" podUID="46a8c934-ec5d-48f9-8c39-f4e4c730d7a5" containerName="util" Jan 27 08:14:25 crc kubenswrapper[4787]: E0127 08:14:25.536445 4787 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="46a8c934-ec5d-48f9-8c39-f4e4c730d7a5" containerName="pull" Jan 27 08:14:25 crc kubenswrapper[4787]: I0127 08:14:25.536451 4787 state_mem.go:107] "Deleted CPUSet assignment" podUID="46a8c934-ec5d-48f9-8c39-f4e4c730d7a5" containerName="pull" Jan 27 08:14:25 crc kubenswrapper[4787]: E0127 08:14:25.536475 4787 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="46a8c934-ec5d-48f9-8c39-f4e4c730d7a5" containerName="extract" Jan 27 08:14:25 crc kubenswrapper[4787]: I0127 08:14:25.536481 4787 state_mem.go:107] "Deleted CPUSet assignment" podUID="46a8c934-ec5d-48f9-8c39-f4e4c730d7a5" containerName="extract" Jan 27 08:14:25 crc kubenswrapper[4787]: I0127 08:14:25.536657 4787 memory_manager.go:354] "RemoveStaleState removing state" podUID="46a8c934-ec5d-48f9-8c39-f4e4c730d7a5" containerName="extract" Jan 27 08:14:25 crc kubenswrapper[4787]: I0127 08:14:25.537789 4787 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-sdc6l" Jan 27 08:14:25 crc kubenswrapper[4787]: I0127 08:14:25.549114 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-sdc6l"] Jan 27 08:14:25 crc kubenswrapper[4787]: I0127 08:14:25.677993 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-njqjn\" (UniqueName: \"kubernetes.io/projected/8218445e-5e55-47de-8494-09626ae390d7-kube-api-access-njqjn\") pod \"redhat-operators-sdc6l\" (UID: \"8218445e-5e55-47de-8494-09626ae390d7\") " pod="openshift-marketplace/redhat-operators-sdc6l" Jan 27 08:14:25 crc kubenswrapper[4787]: I0127 08:14:25.678080 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8218445e-5e55-47de-8494-09626ae390d7-catalog-content\") pod \"redhat-operators-sdc6l\" (UID: \"8218445e-5e55-47de-8494-09626ae390d7\") " pod="openshift-marketplace/redhat-operators-sdc6l" Jan 27 08:14:25 crc kubenswrapper[4787]: I0127 08:14:25.678111 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8218445e-5e55-47de-8494-09626ae390d7-utilities\") pod \"redhat-operators-sdc6l\" (UID: \"8218445e-5e55-47de-8494-09626ae390d7\") " pod="openshift-marketplace/redhat-operators-sdc6l" Jan 27 08:14:25 crc kubenswrapper[4787]: I0127 08:14:25.779333 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8218445e-5e55-47de-8494-09626ae390d7-catalog-content\") pod \"redhat-operators-sdc6l\" (UID: \"8218445e-5e55-47de-8494-09626ae390d7\") " pod="openshift-marketplace/redhat-operators-sdc6l" Jan 27 08:14:25 crc kubenswrapper[4787]: I0127 08:14:25.779380 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8218445e-5e55-47de-8494-09626ae390d7-utilities\") pod \"redhat-operators-sdc6l\" (UID: \"8218445e-5e55-47de-8494-09626ae390d7\") " pod="openshift-marketplace/redhat-operators-sdc6l" Jan 27 08:14:25 crc kubenswrapper[4787]: I0127 08:14:25.779496 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-njqjn\" (UniqueName: \"kubernetes.io/projected/8218445e-5e55-47de-8494-09626ae390d7-kube-api-access-njqjn\") pod \"redhat-operators-sdc6l\" (UID: \"8218445e-5e55-47de-8494-09626ae390d7\") " pod="openshift-marketplace/redhat-operators-sdc6l" Jan 27 08:14:25 crc kubenswrapper[4787]: I0127 08:14:25.779945 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8218445e-5e55-47de-8494-09626ae390d7-catalog-content\") pod \"redhat-operators-sdc6l\" (UID: \"8218445e-5e55-47de-8494-09626ae390d7\") " pod="openshift-marketplace/redhat-operators-sdc6l" Jan 27 08:14:25 crc kubenswrapper[4787]: I0127 08:14:25.780208 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8218445e-5e55-47de-8494-09626ae390d7-utilities\") pod \"redhat-operators-sdc6l\" (UID: \"8218445e-5e55-47de-8494-09626ae390d7\") " pod="openshift-marketplace/redhat-operators-sdc6l" Jan 27 08:14:25 crc kubenswrapper[4787]: I0127 08:14:25.817803 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-njqjn\" (UniqueName: \"kubernetes.io/projected/8218445e-5e55-47de-8494-09626ae390d7-kube-api-access-njqjn\") pod \"redhat-operators-sdc6l\" (UID: \"8218445e-5e55-47de-8494-09626ae390d7\") " pod="openshift-marketplace/redhat-operators-sdc6l" Jan 27 08:14:25 crc kubenswrapper[4787]: I0127 08:14:25.857124 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-sdc6l" Jan 27 08:14:26 crc kubenswrapper[4787]: I0127 08:14:26.380496 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-sdc6l"] Jan 27 08:14:26 crc kubenswrapper[4787]: I0127 08:14:26.765534 4787 generic.go:334] "Generic (PLEG): container finished" podID="8218445e-5e55-47de-8494-09626ae390d7" containerID="280a6824ecd02da11881b8f213c17337d293087aac7116df04d3b178db903adc" exitCode=0 Jan 27 08:14:26 crc kubenswrapper[4787]: I0127 08:14:26.765609 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-sdc6l" event={"ID":"8218445e-5e55-47de-8494-09626ae390d7","Type":"ContainerDied","Data":"280a6824ecd02da11881b8f213c17337d293087aac7116df04d3b178db903adc"} Jan 27 08:14:26 crc kubenswrapper[4787]: I0127 08:14:26.765676 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-sdc6l" event={"ID":"8218445e-5e55-47de-8494-09626ae390d7","Type":"ContainerStarted","Data":"3f35b98bcba954c309b775902bd7e82ed8b70ba8f32820c61d2416b6243a6a93"} Jan 27 08:14:26 crc kubenswrapper[4787]: I0127 08:14:26.768141 4787 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Jan 27 08:14:27 crc kubenswrapper[4787]: I0127 08:14:27.776989 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-sdc6l" event={"ID":"8218445e-5e55-47de-8494-09626ae390d7","Type":"ContainerStarted","Data":"e6bb138cca2f4e6fa156e54e99282b58c30f53856dc00f9a5d3f8a6670030034"} Jan 27 08:14:28 crc kubenswrapper[4787]: I0127 08:14:28.787014 4787 generic.go:334] "Generic (PLEG): container finished" podID="8218445e-5e55-47de-8494-09626ae390d7" containerID="e6bb138cca2f4e6fa156e54e99282b58c30f53856dc00f9a5d3f8a6670030034" exitCode=0 Jan 27 08:14:28 crc kubenswrapper[4787]: I0127 08:14:28.787141 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-sdc6l" event={"ID":"8218445e-5e55-47de-8494-09626ae390d7","Type":"ContainerDied","Data":"e6bb138cca2f4e6fa156e54e99282b58c30f53856dc00f9a5d3f8a6670030034"} Jan 27 08:14:29 crc kubenswrapper[4787]: I0127 08:14:29.797079 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-sdc6l" event={"ID":"8218445e-5e55-47de-8494-09626ae390d7","Type":"ContainerStarted","Data":"dae4ac2676ec238e108d5af9769af1e39361238e7a77c3ff359cb911aa7cfe96"} Jan 27 08:14:29 crc kubenswrapper[4787]: I0127 08:14:29.819887 4787 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-sdc6l" podStartSLOduration=2.364462915 podStartE2EDuration="4.819868013s" podCreationTimestamp="2026-01-27 08:14:25 +0000 UTC" firstStartedPulling="2026-01-27 08:14:26.767902764 +0000 UTC m=+1372.420258256" lastFinishedPulling="2026-01-27 08:14:29.223307862 +0000 UTC m=+1374.875663354" observedRunningTime="2026-01-27 08:14:29.813717446 +0000 UTC m=+1375.466072978" watchObservedRunningTime="2026-01-27 08:14:29.819868013 +0000 UTC m=+1375.472223515" Jan 27 08:14:35 crc 
kubenswrapper[4787]: I0127 08:14:35.858444 4787 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-sdc6l" Jan 27 08:14:35 crc kubenswrapper[4787]: I0127 08:14:35.858903 4787 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-sdc6l" Jan 27 08:14:35 crc kubenswrapper[4787]: I0127 08:14:35.898477 4787 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-sdc6l" Jan 27 08:14:36 crc kubenswrapper[4787]: I0127 08:14:36.932534 4787 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-sdc6l" Jan 27 08:14:36 crc kubenswrapper[4787]: I0127 08:14:36.982262 4787 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-sdc6l"] Jan 27 08:14:38 crc kubenswrapper[4787]: I0127 08:14:38.874497 4787 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-sdc6l" podUID="8218445e-5e55-47de-8494-09626ae390d7" containerName="registry-server" containerID="cri-o://dae4ac2676ec238e108d5af9769af1e39361238e7a77c3ff359cb911aa7cfe96" gracePeriod=2 Jan 27 08:14:39 crc kubenswrapper[4787]: I0127 08:14:39.337225 4787 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-sdc6l" Jan 27 08:14:39 crc kubenswrapper[4787]: I0127 08:14:39.419924 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8218445e-5e55-47de-8494-09626ae390d7-catalog-content\") pod \"8218445e-5e55-47de-8494-09626ae390d7\" (UID: \"8218445e-5e55-47de-8494-09626ae390d7\") " Jan 27 08:14:39 crc kubenswrapper[4787]: I0127 08:14:39.420285 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-njqjn\" (UniqueName: \"kubernetes.io/projected/8218445e-5e55-47de-8494-09626ae390d7-kube-api-access-njqjn\") pod \"8218445e-5e55-47de-8494-09626ae390d7\" (UID: \"8218445e-5e55-47de-8494-09626ae390d7\") " Jan 27 08:14:39 crc kubenswrapper[4787]: I0127 08:14:39.420391 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8218445e-5e55-47de-8494-09626ae390d7-utilities\") pod \"8218445e-5e55-47de-8494-09626ae390d7\" (UID: \"8218445e-5e55-47de-8494-09626ae390d7\") " Jan 27 08:14:39 crc kubenswrapper[4787]: I0127 08:14:39.421627 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8218445e-5e55-47de-8494-09626ae390d7-utilities" (OuterVolumeSpecName: "utilities") pod "8218445e-5e55-47de-8494-09626ae390d7" (UID: "8218445e-5e55-47de-8494-09626ae390d7"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 27 08:14:39 crc kubenswrapper[4787]: I0127 08:14:39.426534 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8218445e-5e55-47de-8494-09626ae390d7-kube-api-access-njqjn" (OuterVolumeSpecName: "kube-api-access-njqjn") pod "8218445e-5e55-47de-8494-09626ae390d7" (UID: "8218445e-5e55-47de-8494-09626ae390d7"). InnerVolumeSpecName "kube-api-access-njqjn". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 27 08:14:39 crc kubenswrapper[4787]: I0127 08:14:39.522245 4787 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-njqjn\" (UniqueName: \"kubernetes.io/projected/8218445e-5e55-47de-8494-09626ae390d7-kube-api-access-njqjn\") on node \"crc\" DevicePath \"\"" Jan 27 08:14:39 crc kubenswrapper[4787]: I0127 08:14:39.522611 4787 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8218445e-5e55-47de-8494-09626ae390d7-utilities\") on node \"crc\" DevicePath \"\"" Jan 27 08:14:39 crc kubenswrapper[4787]: I0127 08:14:39.546257 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8218445e-5e55-47de-8494-09626ae390d7-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "8218445e-5e55-47de-8494-09626ae390d7" (UID: "8218445e-5e55-47de-8494-09626ae390d7"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 27 08:14:39 crc kubenswrapper[4787]: I0127 08:14:39.624241 4787 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8218445e-5e55-47de-8494-09626ae390d7-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 27 08:14:39 crc kubenswrapper[4787]: I0127 08:14:39.883394 4787 generic.go:334] "Generic (PLEG): container finished" podID="8218445e-5e55-47de-8494-09626ae390d7" containerID="dae4ac2676ec238e108d5af9769af1e39361238e7a77c3ff359cb911aa7cfe96" exitCode=0 Jan 27 08:14:39 crc kubenswrapper[4787]: I0127 08:14:39.883466 4787 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-sdc6l" Jan 27 08:14:39 crc kubenswrapper[4787]: I0127 08:14:39.883477 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-sdc6l" event={"ID":"8218445e-5e55-47de-8494-09626ae390d7","Type":"ContainerDied","Data":"dae4ac2676ec238e108d5af9769af1e39361238e7a77c3ff359cb911aa7cfe96"} Jan 27 08:14:39 crc kubenswrapper[4787]: I0127 08:14:39.883511 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-sdc6l" event={"ID":"8218445e-5e55-47de-8494-09626ae390d7","Type":"ContainerDied","Data":"3f35b98bcba954c309b775902bd7e82ed8b70ba8f32820c61d2416b6243a6a93"} Jan 27 08:14:39 crc kubenswrapper[4787]: I0127 08:14:39.883544 4787 scope.go:117] "RemoveContainer" containerID="dae4ac2676ec238e108d5af9769af1e39361238e7a77c3ff359cb911aa7cfe96" Jan 27 08:14:39 crc kubenswrapper[4787]: I0127 08:14:39.912595 4787 scope.go:117] "RemoveContainer" containerID="e6bb138cca2f4e6fa156e54e99282b58c30f53856dc00f9a5d3f8a6670030034" Jan 27 08:14:39 crc kubenswrapper[4787]: I0127 08:14:39.929784 4787 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-sdc6l"] Jan 27 08:14:39 crc kubenswrapper[4787]: I0127 08:14:39.938850 4787 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-sdc6l"] Jan 27 08:14:39 crc kubenswrapper[4787]: I0127 08:14:39.941970 4787 scope.go:117] "RemoveContainer" containerID="280a6824ecd02da11881b8f213c17337d293087aac7116df04d3b178db903adc" Jan 27 08:14:39 crc kubenswrapper[4787]: I0127 08:14:39.957218 4787 scope.go:117] "RemoveContainer" containerID="dae4ac2676ec238e108d5af9769af1e39361238e7a77c3ff359cb911aa7cfe96" Jan 27 08:14:39 crc kubenswrapper[4787]: E0127 08:14:39.957687 4787 log.go:32] "ContainerStatus 
from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"dae4ac2676ec238e108d5af9769af1e39361238e7a77c3ff359cb911aa7cfe96\": container with ID starting with dae4ac2676ec238e108d5af9769af1e39361238e7a77c3ff359cb911aa7cfe96 not found: ID does not exist" containerID="dae4ac2676ec238e108d5af9769af1e39361238e7a77c3ff359cb911aa7cfe96" Jan 27 08:14:39 crc kubenswrapper[4787]: I0127 08:14:39.957754 4787 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dae4ac2676ec238e108d5af9769af1e39361238e7a77c3ff359cb911aa7cfe96"} err="failed to get container status \"dae4ac2676ec238e108d5af9769af1e39361238e7a77c3ff359cb911aa7cfe96\": rpc error: code = NotFound desc = could not find container \"dae4ac2676ec238e108d5af9769af1e39361238e7a77c3ff359cb911aa7cfe96\": container with ID starting with dae4ac2676ec238e108d5af9769af1e39361238e7a77c3ff359cb911aa7cfe96 not found: ID does not exist" Jan 27 08:14:39 crc kubenswrapper[4787]: I0127 08:14:39.957787 4787 scope.go:117] "RemoveContainer" containerID="e6bb138cca2f4e6fa156e54e99282b58c30f53856dc00f9a5d3f8a6670030034" Jan 27 08:14:39 crc kubenswrapper[4787]: E0127 08:14:39.958224 4787 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e6bb138cca2f4e6fa156e54e99282b58c30f53856dc00f9a5d3f8a6670030034\": container with ID starting with e6bb138cca2f4e6fa156e54e99282b58c30f53856dc00f9a5d3f8a6670030034 not found: ID does not exist" containerID="e6bb138cca2f4e6fa156e54e99282b58c30f53856dc00f9a5d3f8a6670030034" Jan 27 08:14:39 crc kubenswrapper[4787]: I0127 08:14:39.958356 4787 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e6bb138cca2f4e6fa156e54e99282b58c30f53856dc00f9a5d3f8a6670030034"} err="failed to get container status \"e6bb138cca2f4e6fa156e54e99282b58c30f53856dc00f9a5d3f8a6670030034\": rpc error: code = NotFound desc = could not find container \"e6bb138cca2f4e6fa156e54e99282b58c30f53856dc00f9a5d3f8a6670030034\": container with ID starting with e6bb138cca2f4e6fa156e54e99282b58c30f53856dc00f9a5d3f8a6670030034 not found: ID does not exist" Jan 27 08:14:39 crc kubenswrapper[4787]: I0127 08:14:39.958476 4787 scope.go:117] "RemoveContainer" containerID="280a6824ecd02da11881b8f213c17337d293087aac7116df04d3b178db903adc" Jan 27 08:14:39 crc kubenswrapper[4787]: E0127 08:14:39.958885 4787 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"280a6824ecd02da11881b8f213c17337d293087aac7116df04d3b178db903adc\": container with ID starting with 280a6824ecd02da11881b8f213c17337d293087aac7116df04d3b178db903adc not found: ID does not exist" containerID="280a6824ecd02da11881b8f213c17337d293087aac7116df04d3b178db903adc" Jan 27 08:14:39 crc kubenswrapper[4787]: I0127 08:14:39.958915 4787 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"280a6824ecd02da11881b8f213c17337d293087aac7116df04d3b178db903adc"} err="failed to get container status \"280a6824ecd02da11881b8f213c17337d293087aac7116df04d3b178db903adc\": rpc error: code = NotFound desc = could not find container \"280a6824ecd02da11881b8f213c17337d293087aac7116df04d3b178db903adc\": container with ID starting with 280a6824ecd02da11881b8f213c17337d293087aac7116df04d3b178db903adc not found: ID does not exist" Jan 27 08:14:41 crc kubenswrapper[4787]: I0127 08:14:41.086797 4787 kubelet_volumes.go:163] "Cleaned up orphaned pod 
volumes dir" podUID="8218445e-5e55-47de-8494-09626ae390d7" path="/var/lib/kubelet/pods/8218445e-5e55-47de-8494-09626ae390d7/volumes" Jan 27 08:14:48 crc kubenswrapper[4787]: I0127 08:14:48.488657 4787 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-4g7ns"] Jan 27 08:14:48 crc kubenswrapper[4787]: E0127 08:14:48.489973 4787 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8218445e-5e55-47de-8494-09626ae390d7" containerName="registry-server" Jan 27 08:14:48 crc kubenswrapper[4787]: I0127 08:14:48.489993 4787 state_mem.go:107] "Deleted CPUSet assignment" podUID="8218445e-5e55-47de-8494-09626ae390d7" containerName="registry-server" Jan 27 08:14:48 crc kubenswrapper[4787]: E0127 08:14:48.490031 4787 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8218445e-5e55-47de-8494-09626ae390d7" containerName="extract-utilities" Jan 27 08:14:48 crc kubenswrapper[4787]: I0127 08:14:48.490039 4787 state_mem.go:107] "Deleted CPUSet assignment" podUID="8218445e-5e55-47de-8494-09626ae390d7" containerName="extract-utilities" Jan 27 08:14:48 crc kubenswrapper[4787]: E0127 08:14:48.490060 4787 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8218445e-5e55-47de-8494-09626ae390d7" containerName="extract-content" Jan 27 08:14:48 crc kubenswrapper[4787]: I0127 08:14:48.490067 4787 state_mem.go:107] "Deleted CPUSet assignment" podUID="8218445e-5e55-47de-8494-09626ae390d7" containerName="extract-content" Jan 27 08:14:48 crc kubenswrapper[4787]: I0127 08:14:48.490262 4787 memory_manager.go:354] "RemoveStaleState removing state" podUID="8218445e-5e55-47de-8494-09626ae390d7" containerName="registry-server" Jan 27 08:14:48 crc kubenswrapper[4787]: I0127 08:14:48.522722 4787 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-4g7ns" Jan 27 08:14:48 crc kubenswrapper[4787]: I0127 08:14:48.540113 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-4g7ns"] Jan 27 08:14:48 crc kubenswrapper[4787]: I0127 08:14:48.580885 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vm8ks\" (UniqueName: \"kubernetes.io/projected/28c90b47-9ec6-477a-a770-60904a0a4236-kube-api-access-vm8ks\") pod \"redhat-marketplace-4g7ns\" (UID: \"28c90b47-9ec6-477a-a770-60904a0a4236\") " pod="openshift-marketplace/redhat-marketplace-4g7ns" Jan 27 08:14:48 crc kubenswrapper[4787]: I0127 08:14:48.581004 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/28c90b47-9ec6-477a-a770-60904a0a4236-utilities\") pod \"redhat-marketplace-4g7ns\" (UID: \"28c90b47-9ec6-477a-a770-60904a0a4236\") " pod="openshift-marketplace/redhat-marketplace-4g7ns" Jan 27 08:14:48 crc kubenswrapper[4787]: I0127 08:14:48.581068 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/28c90b47-9ec6-477a-a770-60904a0a4236-catalog-content\") pod \"redhat-marketplace-4g7ns\" (UID: \"28c90b47-9ec6-477a-a770-60904a0a4236\") " pod="openshift-marketplace/redhat-marketplace-4g7ns" Jan 27 08:14:48 crc kubenswrapper[4787]: I0127 08:14:48.682251 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vm8ks\" (UniqueName: \"kubernetes.io/projected/28c90b47-9ec6-477a-a770-60904a0a4236-kube-api-access-vm8ks\") pod \"redhat-marketplace-4g7ns\" (UID: \"28c90b47-9ec6-477a-a770-60904a0a4236\") " pod="openshift-marketplace/redhat-marketplace-4g7ns" Jan 27 08:14:48 crc kubenswrapper[4787]: I0127 08:14:48.682321 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/28c90b47-9ec6-477a-a770-60904a0a4236-utilities\") pod \"redhat-marketplace-4g7ns\" (UID: \"28c90b47-9ec6-477a-a770-60904a0a4236\") " pod="openshift-marketplace/redhat-marketplace-4g7ns" Jan 27 08:14:48 crc kubenswrapper[4787]: I0127 08:14:48.682360 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/28c90b47-9ec6-477a-a770-60904a0a4236-catalog-content\") pod \"redhat-marketplace-4g7ns\" (UID: \"28c90b47-9ec6-477a-a770-60904a0a4236\") " pod="openshift-marketplace/redhat-marketplace-4g7ns" Jan 27 08:14:48 crc kubenswrapper[4787]: I0127 08:14:48.682972 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/28c90b47-9ec6-477a-a770-60904a0a4236-utilities\") pod \"redhat-marketplace-4g7ns\" (UID: \"28c90b47-9ec6-477a-a770-60904a0a4236\") " pod="openshift-marketplace/redhat-marketplace-4g7ns" Jan 27 08:14:48 crc kubenswrapper[4787]: I0127 08:14:48.683230 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/28c90b47-9ec6-477a-a770-60904a0a4236-catalog-content\") pod \"redhat-marketplace-4g7ns\" (UID: \"28c90b47-9ec6-477a-a770-60904a0a4236\") " pod="openshift-marketplace/redhat-marketplace-4g7ns" Jan 27 08:14:48 crc kubenswrapper[4787]: I0127 08:14:48.704293 4787 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-vm8ks\" (UniqueName: \"kubernetes.io/projected/28c90b47-9ec6-477a-a770-60904a0a4236-kube-api-access-vm8ks\") pod \"redhat-marketplace-4g7ns\" (UID: \"28c90b47-9ec6-477a-a770-60904a0a4236\") " pod="openshift-marketplace/redhat-marketplace-4g7ns" Jan 27 08:14:48 crc kubenswrapper[4787]: I0127 08:14:48.849201 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-4g7ns" Jan 27 08:14:49 crc kubenswrapper[4787]: I0127 08:14:49.314470 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-4g7ns"] Jan 27 08:14:49 crc kubenswrapper[4787]: I0127 08:14:49.968109 4787 generic.go:334] "Generic (PLEG): container finished" podID="28c90b47-9ec6-477a-a770-60904a0a4236" containerID="8154237b3ba079aff74e2c4c4e45d7909ce1ee4e70679a73d5b8e5f1e23426f1" exitCode=0 Jan 27 08:14:49 crc kubenswrapper[4787]: I0127 08:14:49.968188 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-4g7ns" event={"ID":"28c90b47-9ec6-477a-a770-60904a0a4236","Type":"ContainerDied","Data":"8154237b3ba079aff74e2c4c4e45d7909ce1ee4e70679a73d5b8e5f1e23426f1"} Jan 27 08:14:49 crc kubenswrapper[4787]: I0127 08:14:49.968632 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-4g7ns" event={"ID":"28c90b47-9ec6-477a-a770-60904a0a4236","Type":"ContainerStarted","Data":"24234f9674cf861fa7b19408034aa2877fd4e5aaceb6d880efe0e608f15b8ca1"} Jan 27 08:14:50 crc kubenswrapper[4787]: I0127 08:14:50.978466 4787 generic.go:334] "Generic (PLEG): container finished" podID="28c90b47-9ec6-477a-a770-60904a0a4236" containerID="6ca8c474a5fe3de819fd795d3b2739afafc446c1cdd4220442f048ba2291eaf2" exitCode=0 Jan 27 08:14:50 crc kubenswrapper[4787]: I0127 08:14:50.978512 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-4g7ns" event={"ID":"28c90b47-9ec6-477a-a770-60904a0a4236","Type":"ContainerDied","Data":"6ca8c474a5fe3de819fd795d3b2739afafc446c1cdd4220442f048ba2291eaf2"} Jan 27 08:14:51 crc kubenswrapper[4787]: I0127 08:14:51.989734 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-4g7ns" event={"ID":"28c90b47-9ec6-477a-a770-60904a0a4236","Type":"ContainerStarted","Data":"3884bb8a07defe13492ec93fb1797ad29d963eecac9b644e081998844bc86085"} Jan 27 08:14:52 crc kubenswrapper[4787]: I0127 08:14:52.022755 4787 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-4g7ns" podStartSLOduration=2.5308242439999997 podStartE2EDuration="4.022725293s" podCreationTimestamp="2026-01-27 08:14:48 +0000 UTC" firstStartedPulling="2026-01-27 08:14:49.969949666 +0000 UTC m=+1395.622305158" lastFinishedPulling="2026-01-27 08:14:51.461850715 +0000 UTC m=+1397.114206207" observedRunningTime="2026-01-27 08:14:52.011537575 +0000 UTC m=+1397.663893087" watchObservedRunningTime="2026-01-27 08:14:52.022725293 +0000 UTC m=+1397.675080805" Jan 27 08:14:58 crc kubenswrapper[4787]: I0127 08:14:58.850109 4787 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-4g7ns" Jan 27 08:14:58 crc kubenswrapper[4787]: I0127 08:14:58.850889 4787 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-4g7ns" Jan 27 08:14:58 crc kubenswrapper[4787]: I0127 08:14:58.901422 4787 kubelet.go:2542] 
"SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-4g7ns" Jan 27 08:14:59 crc kubenswrapper[4787]: I0127 08:14:59.085485 4787 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-4g7ns" Jan 27 08:14:59 crc kubenswrapper[4787]: I0127 08:14:59.132943 4787 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-4g7ns"] Jan 27 08:15:00 crc kubenswrapper[4787]: I0127 08:15:00.148280 4787 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29491695-blvcr"] Jan 27 08:15:00 crc kubenswrapper[4787]: I0127 08:15:00.149373 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29491695-blvcr" Jan 27 08:15:00 crc kubenswrapper[4787]: I0127 08:15:00.151851 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Jan 27 08:15:00 crc kubenswrapper[4787]: I0127 08:15:00.152680 4787 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Jan 27 08:15:00 crc kubenswrapper[4787]: I0127 08:15:00.165157 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29491695-blvcr"] Jan 27 08:15:00 crc kubenswrapper[4787]: I0127 08:15:00.281512 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/ca43b74d-287b-422f-b051-db48f00b98ba-secret-volume\") pod \"collect-profiles-29491695-blvcr\" (UID: \"ca43b74d-287b-422f-b051-db48f00b98ba\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29491695-blvcr" Jan 27 08:15:00 crc kubenswrapper[4787]: I0127 08:15:00.281802 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/ca43b74d-287b-422f-b051-db48f00b98ba-config-volume\") pod \"collect-profiles-29491695-blvcr\" (UID: \"ca43b74d-287b-422f-b051-db48f00b98ba\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29491695-blvcr" Jan 27 08:15:00 crc kubenswrapper[4787]: I0127 08:15:00.281865 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-44gkw\" (UniqueName: \"kubernetes.io/projected/ca43b74d-287b-422f-b051-db48f00b98ba-kube-api-access-44gkw\") pod \"collect-profiles-29491695-blvcr\" (UID: \"ca43b74d-287b-422f-b051-db48f00b98ba\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29491695-blvcr" Jan 27 08:15:00 crc kubenswrapper[4787]: I0127 08:15:00.383284 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/ca43b74d-287b-422f-b051-db48f00b98ba-secret-volume\") pod \"collect-profiles-29491695-blvcr\" (UID: \"ca43b74d-287b-422f-b051-db48f00b98ba\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29491695-blvcr" Jan 27 08:15:00 crc kubenswrapper[4787]: I0127 08:15:00.383792 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/ca43b74d-287b-422f-b051-db48f00b98ba-config-volume\") pod \"collect-profiles-29491695-blvcr\" (UID: 
\"ca43b74d-287b-422f-b051-db48f00b98ba\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29491695-blvcr" Jan 27 08:15:00 crc kubenswrapper[4787]: I0127 08:15:00.383818 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-44gkw\" (UniqueName: \"kubernetes.io/projected/ca43b74d-287b-422f-b051-db48f00b98ba-kube-api-access-44gkw\") pod \"collect-profiles-29491695-blvcr\" (UID: \"ca43b74d-287b-422f-b051-db48f00b98ba\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29491695-blvcr" Jan 27 08:15:00 crc kubenswrapper[4787]: I0127 08:15:00.384695 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/ca43b74d-287b-422f-b051-db48f00b98ba-config-volume\") pod \"collect-profiles-29491695-blvcr\" (UID: \"ca43b74d-287b-422f-b051-db48f00b98ba\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29491695-blvcr" Jan 27 08:15:00 crc kubenswrapper[4787]: I0127 08:15:00.389226 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/ca43b74d-287b-422f-b051-db48f00b98ba-secret-volume\") pod \"collect-profiles-29491695-blvcr\" (UID: \"ca43b74d-287b-422f-b051-db48f00b98ba\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29491695-blvcr" Jan 27 08:15:00 crc kubenswrapper[4787]: I0127 08:15:00.407282 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-44gkw\" (UniqueName: \"kubernetes.io/projected/ca43b74d-287b-422f-b051-db48f00b98ba-kube-api-access-44gkw\") pod \"collect-profiles-29491695-blvcr\" (UID: \"ca43b74d-287b-422f-b051-db48f00b98ba\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29491695-blvcr" Jan 27 08:15:00 crc kubenswrapper[4787]: I0127 08:15:00.478983 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29491695-blvcr" Jan 27 08:15:00 crc kubenswrapper[4787]: I0127 08:15:00.948900 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29491695-blvcr"] Jan 27 08:15:01 crc kubenswrapper[4787]: I0127 08:15:01.052930 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29491695-blvcr" event={"ID":"ca43b74d-287b-422f-b051-db48f00b98ba","Type":"ContainerStarted","Data":"68d809d5825cca82ed36cccdd71cd7b017f6ddf63d8759382338a0b3c2b08ffe"} Jan 27 08:15:01 crc kubenswrapper[4787]: I0127 08:15:01.053106 4787 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-4g7ns" podUID="28c90b47-9ec6-477a-a770-60904a0a4236" containerName="registry-server" containerID="cri-o://3884bb8a07defe13492ec93fb1797ad29d963eecac9b644e081998844bc86085" gracePeriod=2 Jan 27 08:15:01 crc kubenswrapper[4787]: I0127 08:15:01.512441 4787 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-4g7ns" Jan 27 08:15:01 crc kubenswrapper[4787]: I0127 08:15:01.607236 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vm8ks\" (UniqueName: \"kubernetes.io/projected/28c90b47-9ec6-477a-a770-60904a0a4236-kube-api-access-vm8ks\") pod \"28c90b47-9ec6-477a-a770-60904a0a4236\" (UID: \"28c90b47-9ec6-477a-a770-60904a0a4236\") " Jan 27 08:15:01 crc kubenswrapper[4787]: I0127 08:15:01.607331 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/28c90b47-9ec6-477a-a770-60904a0a4236-utilities\") pod \"28c90b47-9ec6-477a-a770-60904a0a4236\" (UID: \"28c90b47-9ec6-477a-a770-60904a0a4236\") " Jan 27 08:15:01 crc kubenswrapper[4787]: I0127 08:15:01.607372 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/28c90b47-9ec6-477a-a770-60904a0a4236-catalog-content\") pod \"28c90b47-9ec6-477a-a770-60904a0a4236\" (UID: \"28c90b47-9ec6-477a-a770-60904a0a4236\") " Jan 27 08:15:01 crc kubenswrapper[4787]: I0127 08:15:01.613891 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/28c90b47-9ec6-477a-a770-60904a0a4236-utilities" (OuterVolumeSpecName: "utilities") pod "28c90b47-9ec6-477a-a770-60904a0a4236" (UID: "28c90b47-9ec6-477a-a770-60904a0a4236"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 27 08:15:01 crc kubenswrapper[4787]: I0127 08:15:01.640649 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/28c90b47-9ec6-477a-a770-60904a0a4236-kube-api-access-vm8ks" (OuterVolumeSpecName: "kube-api-access-vm8ks") pod "28c90b47-9ec6-477a-a770-60904a0a4236" (UID: "28c90b47-9ec6-477a-a770-60904a0a4236"). InnerVolumeSpecName "kube-api-access-vm8ks". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 27 08:15:01 crc kubenswrapper[4787]: I0127 08:15:01.709095 4787 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vm8ks\" (UniqueName: \"kubernetes.io/projected/28c90b47-9ec6-477a-a770-60904a0a4236-kube-api-access-vm8ks\") on node \"crc\" DevicePath \"\"" Jan 27 08:15:01 crc kubenswrapper[4787]: I0127 08:15:01.709331 4787 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/28c90b47-9ec6-477a-a770-60904a0a4236-utilities\") on node \"crc\" DevicePath \"\"" Jan 27 08:15:01 crc kubenswrapper[4787]: I0127 08:15:01.748307 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/28c90b47-9ec6-477a-a770-60904a0a4236-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "28c90b47-9ec6-477a-a770-60904a0a4236" (UID: "28c90b47-9ec6-477a-a770-60904a0a4236"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 27 08:15:01 crc kubenswrapper[4787]: I0127 08:15:01.811262 4787 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/28c90b47-9ec6-477a-a770-60904a0a4236-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 27 08:15:02 crc kubenswrapper[4787]: I0127 08:15:02.062117 4787 generic.go:334] "Generic (PLEG): container finished" podID="ca43b74d-287b-422f-b051-db48f00b98ba" containerID="ebf7c142fe0d519190036ee75ceaaf9a044d1a3b84003048844748f7dc5ffc20" exitCode=0 Jan 27 08:15:02 crc kubenswrapper[4787]: I0127 08:15:02.062233 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29491695-blvcr" event={"ID":"ca43b74d-287b-422f-b051-db48f00b98ba","Type":"ContainerDied","Data":"ebf7c142fe0d519190036ee75ceaaf9a044d1a3b84003048844748f7dc5ffc20"} Jan 27 08:15:02 crc kubenswrapper[4787]: I0127 08:15:02.064705 4787 generic.go:334] "Generic (PLEG): container finished" podID="28c90b47-9ec6-477a-a770-60904a0a4236" containerID="3884bb8a07defe13492ec93fb1797ad29d963eecac9b644e081998844bc86085" exitCode=0 Jan 27 08:15:02 crc kubenswrapper[4787]: I0127 08:15:02.064753 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-4g7ns" event={"ID":"28c90b47-9ec6-477a-a770-60904a0a4236","Type":"ContainerDied","Data":"3884bb8a07defe13492ec93fb1797ad29d963eecac9b644e081998844bc86085"} Jan 27 08:15:02 crc kubenswrapper[4787]: I0127 08:15:02.064781 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-4g7ns" event={"ID":"28c90b47-9ec6-477a-a770-60904a0a4236","Type":"ContainerDied","Data":"24234f9674cf861fa7b19408034aa2877fd4e5aaceb6d880efe0e608f15b8ca1"} Jan 27 08:15:02 crc kubenswrapper[4787]: I0127 08:15:02.064790 4787 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-4g7ns" Jan 27 08:15:02 crc kubenswrapper[4787]: I0127 08:15:02.064865 4787 scope.go:117] "RemoveContainer" containerID="3884bb8a07defe13492ec93fb1797ad29d963eecac9b644e081998844bc86085" Jan 27 08:15:02 crc kubenswrapper[4787]: I0127 08:15:02.091337 4787 scope.go:117] "RemoveContainer" containerID="6ca8c474a5fe3de819fd795d3b2739afafc446c1cdd4220442f048ba2291eaf2" Jan 27 08:15:02 crc kubenswrapper[4787]: I0127 08:15:02.107488 4787 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-4g7ns"] Jan 27 08:15:02 crc kubenswrapper[4787]: I0127 08:15:02.110646 4787 scope.go:117] "RemoveContainer" containerID="8154237b3ba079aff74e2c4c4e45d7909ce1ee4e70679a73d5b8e5f1e23426f1" Jan 27 08:15:02 crc kubenswrapper[4787]: I0127 08:15:02.126834 4787 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-4g7ns"] Jan 27 08:15:02 crc kubenswrapper[4787]: I0127 08:15:02.139477 4787 scope.go:117] "RemoveContainer" containerID="3884bb8a07defe13492ec93fb1797ad29d963eecac9b644e081998844bc86085" Jan 27 08:15:02 crc kubenswrapper[4787]: E0127 08:15:02.139936 4787 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3884bb8a07defe13492ec93fb1797ad29d963eecac9b644e081998844bc86085\": container with ID starting with 3884bb8a07defe13492ec93fb1797ad29d963eecac9b644e081998844bc86085 not found: ID does not exist" containerID="3884bb8a07defe13492ec93fb1797ad29d963eecac9b644e081998844bc86085" Jan 27 08:15:02 crc kubenswrapper[4787]: I0127 08:15:02.139983 4787 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3884bb8a07defe13492ec93fb1797ad29d963eecac9b644e081998844bc86085"} err="failed to get container status \"3884bb8a07defe13492ec93fb1797ad29d963eecac9b644e081998844bc86085\": rpc error: code = NotFound desc = could not find container \"3884bb8a07defe13492ec93fb1797ad29d963eecac9b644e081998844bc86085\": container with ID starting with 3884bb8a07defe13492ec93fb1797ad29d963eecac9b644e081998844bc86085 not found: ID does not exist" Jan 27 08:15:02 crc kubenswrapper[4787]: I0127 08:15:02.140008 4787 scope.go:117] "RemoveContainer" containerID="6ca8c474a5fe3de819fd795d3b2739afafc446c1cdd4220442f048ba2291eaf2" Jan 27 08:15:02 crc kubenswrapper[4787]: E0127 08:15:02.140586 4787 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6ca8c474a5fe3de819fd795d3b2739afafc446c1cdd4220442f048ba2291eaf2\": container with ID starting with 6ca8c474a5fe3de819fd795d3b2739afafc446c1cdd4220442f048ba2291eaf2 not found: ID does not exist" containerID="6ca8c474a5fe3de819fd795d3b2739afafc446c1cdd4220442f048ba2291eaf2" Jan 27 08:15:02 crc kubenswrapper[4787]: I0127 08:15:02.140638 4787 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6ca8c474a5fe3de819fd795d3b2739afafc446c1cdd4220442f048ba2291eaf2"} err="failed to get container status \"6ca8c474a5fe3de819fd795d3b2739afafc446c1cdd4220442f048ba2291eaf2\": rpc error: code = NotFound desc = could not find container \"6ca8c474a5fe3de819fd795d3b2739afafc446c1cdd4220442f048ba2291eaf2\": container with ID starting with 6ca8c474a5fe3de819fd795d3b2739afafc446c1cdd4220442f048ba2291eaf2 not found: ID does not exist" Jan 27 08:15:02 crc kubenswrapper[4787]: I0127 08:15:02.140674 4787 scope.go:117] "RemoveContainer" 
containerID="8154237b3ba079aff74e2c4c4e45d7909ce1ee4e70679a73d5b8e5f1e23426f1" Jan 27 08:15:02 crc kubenswrapper[4787]: E0127 08:15:02.141162 4787 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8154237b3ba079aff74e2c4c4e45d7909ce1ee4e70679a73d5b8e5f1e23426f1\": container with ID starting with 8154237b3ba079aff74e2c4c4e45d7909ce1ee4e70679a73d5b8e5f1e23426f1 not found: ID does not exist" containerID="8154237b3ba079aff74e2c4c4e45d7909ce1ee4e70679a73d5b8e5f1e23426f1" Jan 27 08:15:02 crc kubenswrapper[4787]: I0127 08:15:02.141196 4787 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8154237b3ba079aff74e2c4c4e45d7909ce1ee4e70679a73d5b8e5f1e23426f1"} err="failed to get container status \"8154237b3ba079aff74e2c4c4e45d7909ce1ee4e70679a73d5b8e5f1e23426f1\": rpc error: code = NotFound desc = could not find container \"8154237b3ba079aff74e2c4c4e45d7909ce1ee4e70679a73d5b8e5f1e23426f1\": container with ID starting with 8154237b3ba079aff74e2c4c4e45d7909ce1ee4e70679a73d5b8e5f1e23426f1 not found: ID does not exist" Jan 27 08:15:03 crc kubenswrapper[4787]: I0127 08:15:03.085903 4787 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="28c90b47-9ec6-477a-a770-60904a0a4236" path="/var/lib/kubelet/pods/28c90b47-9ec6-477a-a770-60904a0a4236/volumes" Jan 27 08:15:03 crc kubenswrapper[4787]: I0127 08:15:03.408144 4787 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29491695-blvcr" Jan 27 08:15:03 crc kubenswrapper[4787]: I0127 08:15:03.542905 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/ca43b74d-287b-422f-b051-db48f00b98ba-secret-volume\") pod \"ca43b74d-287b-422f-b051-db48f00b98ba\" (UID: \"ca43b74d-287b-422f-b051-db48f00b98ba\") " Jan 27 08:15:03 crc kubenswrapper[4787]: I0127 08:15:03.543076 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/ca43b74d-287b-422f-b051-db48f00b98ba-config-volume\") pod \"ca43b74d-287b-422f-b051-db48f00b98ba\" (UID: \"ca43b74d-287b-422f-b051-db48f00b98ba\") " Jan 27 08:15:03 crc kubenswrapper[4787]: I0127 08:15:03.543116 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-44gkw\" (UniqueName: \"kubernetes.io/projected/ca43b74d-287b-422f-b051-db48f00b98ba-kube-api-access-44gkw\") pod \"ca43b74d-287b-422f-b051-db48f00b98ba\" (UID: \"ca43b74d-287b-422f-b051-db48f00b98ba\") " Jan 27 08:15:03 crc kubenswrapper[4787]: I0127 08:15:03.544489 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ca43b74d-287b-422f-b051-db48f00b98ba-config-volume" (OuterVolumeSpecName: "config-volume") pod "ca43b74d-287b-422f-b051-db48f00b98ba" (UID: "ca43b74d-287b-422f-b051-db48f00b98ba"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 27 08:15:03 crc kubenswrapper[4787]: I0127 08:15:03.550648 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ca43b74d-287b-422f-b051-db48f00b98ba-kube-api-access-44gkw" (OuterVolumeSpecName: "kube-api-access-44gkw") pod "ca43b74d-287b-422f-b051-db48f00b98ba" (UID: "ca43b74d-287b-422f-b051-db48f00b98ba"). InnerVolumeSpecName "kube-api-access-44gkw". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 27 08:15:03 crc kubenswrapper[4787]: I0127 08:15:03.550948 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ca43b74d-287b-422f-b051-db48f00b98ba-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "ca43b74d-287b-422f-b051-db48f00b98ba" (UID: "ca43b74d-287b-422f-b051-db48f00b98ba"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 27 08:15:03 crc kubenswrapper[4787]: I0127 08:15:03.644901 4787 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/ca43b74d-287b-422f-b051-db48f00b98ba-secret-volume\") on node \"crc\" DevicePath \"\"" Jan 27 08:15:03 crc kubenswrapper[4787]: I0127 08:15:03.645159 4787 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/ca43b74d-287b-422f-b051-db48f00b98ba-config-volume\") on node \"crc\" DevicePath \"\"" Jan 27 08:15:03 crc kubenswrapper[4787]: I0127 08:15:03.645318 4787 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-44gkw\" (UniqueName: \"kubernetes.io/projected/ca43b74d-287b-422f-b051-db48f00b98ba-kube-api-access-44gkw\") on node \"crc\" DevicePath \"\"" Jan 27 08:15:04 crc kubenswrapper[4787]: I0127 08:15:04.081743 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29491695-blvcr" event={"ID":"ca43b74d-287b-422f-b051-db48f00b98ba","Type":"ContainerDied","Data":"68d809d5825cca82ed36cccdd71cd7b017f6ddf63d8759382338a0b3c2b08ffe"} Jan 27 08:15:04 crc kubenswrapper[4787]: I0127 08:15:04.081788 4787 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="68d809d5825cca82ed36cccdd71cd7b017f6ddf63d8759382338a0b3c2b08ffe" Jan 27 08:15:04 crc kubenswrapper[4787]: I0127 08:15:04.081795 4787 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29491695-blvcr" Jan 27 08:15:22 crc kubenswrapper[4787]: I0127 08:15:22.822710 4787 patch_prober.go:28] interesting pod/machine-config-daemon-q4fh5 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 27 08:15:22 crc kubenswrapper[4787]: I0127 08:15:22.823346 4787 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-q4fh5" podUID="f051e184-acac-47cf-9e04-9df648288715" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 27 08:15:35 crc kubenswrapper[4787]: I0127 08:15:35.753795 4787 scope.go:117] "RemoveContainer" containerID="d304505100a9e40f977481818afbc2239ed1910ed8a9349b8d4d47740a9431bb" Jan 27 08:15:52 crc kubenswrapper[4787]: I0127 08:15:52.823109 4787 patch_prober.go:28] interesting pod/machine-config-daemon-q4fh5 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 27 08:15:52 crc kubenswrapper[4787]: I0127 08:15:52.823878 4787 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-q4fh5" podUID="f051e184-acac-47cf-9e04-9df648288715" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 27 08:16:04 crc kubenswrapper[4787]: I0127 08:16:04.483704 4787 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-kf9sw"] Jan 27 08:16:04 crc kubenswrapper[4787]: E0127 08:16:04.484918 4787 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="28c90b47-9ec6-477a-a770-60904a0a4236" containerName="extract-content" Jan 27 08:16:04 crc kubenswrapper[4787]: I0127 08:16:04.484937 4787 state_mem.go:107] "Deleted CPUSet assignment" podUID="28c90b47-9ec6-477a-a770-60904a0a4236" containerName="extract-content" Jan 27 08:16:04 crc kubenswrapper[4787]: E0127 08:16:04.484967 4787 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ca43b74d-287b-422f-b051-db48f00b98ba" containerName="collect-profiles" Jan 27 08:16:04 crc kubenswrapper[4787]: I0127 08:16:04.485055 4787 state_mem.go:107] "Deleted CPUSet assignment" podUID="ca43b74d-287b-422f-b051-db48f00b98ba" containerName="collect-profiles" Jan 27 08:16:04 crc kubenswrapper[4787]: E0127 08:16:04.485078 4787 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="28c90b47-9ec6-477a-a770-60904a0a4236" containerName="registry-server" Jan 27 08:16:04 crc kubenswrapper[4787]: I0127 08:16:04.485088 4787 state_mem.go:107] "Deleted CPUSet assignment" podUID="28c90b47-9ec6-477a-a770-60904a0a4236" containerName="registry-server" Jan 27 08:16:04 crc kubenswrapper[4787]: E0127 08:16:04.485110 4787 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="28c90b47-9ec6-477a-a770-60904a0a4236" containerName="extract-utilities" Jan 27 08:16:04 crc kubenswrapper[4787]: I0127 08:16:04.485120 4787 state_mem.go:107] "Deleted CPUSet assignment" podUID="28c90b47-9ec6-477a-a770-60904a0a4236" containerName="extract-utilities" Jan 27 08:16:04 
crc kubenswrapper[4787]: I0127 08:16:04.485328 4787 memory_manager.go:354] "RemoveStaleState removing state" podUID="ca43b74d-287b-422f-b051-db48f00b98ba" containerName="collect-profiles" Jan 27 08:16:04 crc kubenswrapper[4787]: I0127 08:16:04.485340 4787 memory_manager.go:354] "RemoveStaleState removing state" podUID="28c90b47-9ec6-477a-a770-60904a0a4236" containerName="registry-server" Jan 27 08:16:04 crc kubenswrapper[4787]: I0127 08:16:04.486993 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-kf9sw" Jan 27 08:16:04 crc kubenswrapper[4787]: I0127 08:16:04.507260 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-kf9sw"] Jan 27 08:16:04 crc kubenswrapper[4787]: I0127 08:16:04.646131 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d4fe0298-627e-4958-8321-0ee99f5aa127-utilities\") pod \"community-operators-kf9sw\" (UID: \"d4fe0298-627e-4958-8321-0ee99f5aa127\") " pod="openshift-marketplace/community-operators-kf9sw" Jan 27 08:16:04 crc kubenswrapper[4787]: I0127 08:16:04.646217 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d4fe0298-627e-4958-8321-0ee99f5aa127-catalog-content\") pod \"community-operators-kf9sw\" (UID: \"d4fe0298-627e-4958-8321-0ee99f5aa127\") " pod="openshift-marketplace/community-operators-kf9sw" Jan 27 08:16:04 crc kubenswrapper[4787]: I0127 08:16:04.646247 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dmbrw\" (UniqueName: \"kubernetes.io/projected/d4fe0298-627e-4958-8321-0ee99f5aa127-kube-api-access-dmbrw\") pod \"community-operators-kf9sw\" (UID: \"d4fe0298-627e-4958-8321-0ee99f5aa127\") " pod="openshift-marketplace/community-operators-kf9sw" Jan 27 08:16:04 crc kubenswrapper[4787]: I0127 08:16:04.747691 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d4fe0298-627e-4958-8321-0ee99f5aa127-utilities\") pod \"community-operators-kf9sw\" (UID: \"d4fe0298-627e-4958-8321-0ee99f5aa127\") " pod="openshift-marketplace/community-operators-kf9sw" Jan 27 08:16:04 crc kubenswrapper[4787]: I0127 08:16:04.747797 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d4fe0298-627e-4958-8321-0ee99f5aa127-catalog-content\") pod \"community-operators-kf9sw\" (UID: \"d4fe0298-627e-4958-8321-0ee99f5aa127\") " pod="openshift-marketplace/community-operators-kf9sw" Jan 27 08:16:04 crc kubenswrapper[4787]: I0127 08:16:04.747831 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dmbrw\" (UniqueName: \"kubernetes.io/projected/d4fe0298-627e-4958-8321-0ee99f5aa127-kube-api-access-dmbrw\") pod \"community-operators-kf9sw\" (UID: \"d4fe0298-627e-4958-8321-0ee99f5aa127\") " pod="openshift-marketplace/community-operators-kf9sw" Jan 27 08:16:04 crc kubenswrapper[4787]: I0127 08:16:04.748387 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d4fe0298-627e-4958-8321-0ee99f5aa127-catalog-content\") pod \"community-operators-kf9sw\" (UID: \"d4fe0298-627e-4958-8321-0ee99f5aa127\") " 
pod="openshift-marketplace/community-operators-kf9sw" Jan 27 08:16:04 crc kubenswrapper[4787]: I0127 08:16:04.748514 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d4fe0298-627e-4958-8321-0ee99f5aa127-utilities\") pod \"community-operators-kf9sw\" (UID: \"d4fe0298-627e-4958-8321-0ee99f5aa127\") " pod="openshift-marketplace/community-operators-kf9sw" Jan 27 08:16:04 crc kubenswrapper[4787]: I0127 08:16:04.767513 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dmbrw\" (UniqueName: \"kubernetes.io/projected/d4fe0298-627e-4958-8321-0ee99f5aa127-kube-api-access-dmbrw\") pod \"community-operators-kf9sw\" (UID: \"d4fe0298-627e-4958-8321-0ee99f5aa127\") " pod="openshift-marketplace/community-operators-kf9sw" Jan 27 08:16:04 crc kubenswrapper[4787]: I0127 08:16:04.825429 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-kf9sw" Jan 27 08:16:05 crc kubenswrapper[4787]: I0127 08:16:05.172285 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-kf9sw"] Jan 27 08:16:05 crc kubenswrapper[4787]: I0127 08:16:05.590576 4787 generic.go:334] "Generic (PLEG): container finished" podID="d4fe0298-627e-4958-8321-0ee99f5aa127" containerID="546e611ab78f2ee068246cd0fcffa66f929314c19a038ba5bf735069a78b5d84" exitCode=0 Jan 27 08:16:05 crc kubenswrapper[4787]: I0127 08:16:05.590642 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-kf9sw" event={"ID":"d4fe0298-627e-4958-8321-0ee99f5aa127","Type":"ContainerDied","Data":"546e611ab78f2ee068246cd0fcffa66f929314c19a038ba5bf735069a78b5d84"} Jan 27 08:16:05 crc kubenswrapper[4787]: I0127 08:16:05.591002 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-kf9sw" event={"ID":"d4fe0298-627e-4958-8321-0ee99f5aa127","Type":"ContainerStarted","Data":"a95fa1435edc718a694995255225adb88985d99c5cdf70d182120e41920d9610"} Jan 27 08:16:06 crc kubenswrapper[4787]: I0127 08:16:06.599719 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-kf9sw" event={"ID":"d4fe0298-627e-4958-8321-0ee99f5aa127","Type":"ContainerStarted","Data":"d56cb21340678a3fcbfc0a9222f8a333c974898b22f227e88d5d4124b6a67e7b"} Jan 27 08:16:07 crc kubenswrapper[4787]: I0127 08:16:07.609665 4787 generic.go:334] "Generic (PLEG): container finished" podID="d4fe0298-627e-4958-8321-0ee99f5aa127" containerID="d56cb21340678a3fcbfc0a9222f8a333c974898b22f227e88d5d4124b6a67e7b" exitCode=0 Jan 27 08:16:07 crc kubenswrapper[4787]: I0127 08:16:07.609717 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-kf9sw" event={"ID":"d4fe0298-627e-4958-8321-0ee99f5aa127","Type":"ContainerDied","Data":"d56cb21340678a3fcbfc0a9222f8a333c974898b22f227e88d5d4124b6a67e7b"} Jan 27 08:16:08 crc kubenswrapper[4787]: I0127 08:16:08.618565 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-kf9sw" event={"ID":"d4fe0298-627e-4958-8321-0ee99f5aa127","Type":"ContainerStarted","Data":"857641d24fd05870cf3b38ba69796ac9d253587fbf47c7a878304718b743001d"} Jan 27 08:16:08 crc kubenswrapper[4787]: I0127 08:16:08.640875 4787 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-kf9sw" podStartSLOduration=2.127660977 
podStartE2EDuration="4.64085023s" podCreationTimestamp="2026-01-27 08:16:04 +0000 UTC" firstStartedPulling="2026-01-27 08:16:05.591958618 +0000 UTC m=+1471.244314110" lastFinishedPulling="2026-01-27 08:16:08.105147861 +0000 UTC m=+1473.757503363" observedRunningTime="2026-01-27 08:16:08.638262327 +0000 UTC m=+1474.290617839" watchObservedRunningTime="2026-01-27 08:16:08.64085023 +0000 UTC m=+1474.293205722" Jan 27 08:16:14 crc kubenswrapper[4787]: I0127 08:16:14.826239 4787 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-kf9sw" Jan 27 08:16:14 crc kubenswrapper[4787]: I0127 08:16:14.826904 4787 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-kf9sw" Jan 27 08:16:14 crc kubenswrapper[4787]: I0127 08:16:14.870025 4787 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-kf9sw" Jan 27 08:16:15 crc kubenswrapper[4787]: I0127 08:16:15.734278 4787 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-kf9sw" Jan 27 08:16:15 crc kubenswrapper[4787]: I0127 08:16:15.816640 4787 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-kf9sw"] Jan 27 08:16:17 crc kubenswrapper[4787]: I0127 08:16:17.689958 4787 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-kf9sw" podUID="d4fe0298-627e-4958-8321-0ee99f5aa127" containerName="registry-server" containerID="cri-o://857641d24fd05870cf3b38ba69796ac9d253587fbf47c7a878304718b743001d" gracePeriod=2 Jan 27 08:16:18 crc kubenswrapper[4787]: I0127 08:16:18.103580 4787 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-kf9sw" Jan 27 08:16:18 crc kubenswrapper[4787]: I0127 08:16:18.207197 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dmbrw\" (UniqueName: \"kubernetes.io/projected/d4fe0298-627e-4958-8321-0ee99f5aa127-kube-api-access-dmbrw\") pod \"d4fe0298-627e-4958-8321-0ee99f5aa127\" (UID: \"d4fe0298-627e-4958-8321-0ee99f5aa127\") " Jan 27 08:16:18 crc kubenswrapper[4787]: I0127 08:16:18.207781 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d4fe0298-627e-4958-8321-0ee99f5aa127-catalog-content\") pod \"d4fe0298-627e-4958-8321-0ee99f5aa127\" (UID: \"d4fe0298-627e-4958-8321-0ee99f5aa127\") " Jan 27 08:16:18 crc kubenswrapper[4787]: I0127 08:16:18.207955 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d4fe0298-627e-4958-8321-0ee99f5aa127-utilities\") pod \"d4fe0298-627e-4958-8321-0ee99f5aa127\" (UID: \"d4fe0298-627e-4958-8321-0ee99f5aa127\") " Jan 27 08:16:18 crc kubenswrapper[4787]: I0127 08:16:18.209537 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d4fe0298-627e-4958-8321-0ee99f5aa127-utilities" (OuterVolumeSpecName: "utilities") pod "d4fe0298-627e-4958-8321-0ee99f5aa127" (UID: "d4fe0298-627e-4958-8321-0ee99f5aa127"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 27 08:16:18 crc kubenswrapper[4787]: I0127 08:16:18.217756 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d4fe0298-627e-4958-8321-0ee99f5aa127-kube-api-access-dmbrw" (OuterVolumeSpecName: "kube-api-access-dmbrw") pod "d4fe0298-627e-4958-8321-0ee99f5aa127" (UID: "d4fe0298-627e-4958-8321-0ee99f5aa127"). InnerVolumeSpecName "kube-api-access-dmbrw". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 27 08:16:18 crc kubenswrapper[4787]: I0127 08:16:18.260405 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d4fe0298-627e-4958-8321-0ee99f5aa127-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "d4fe0298-627e-4958-8321-0ee99f5aa127" (UID: "d4fe0298-627e-4958-8321-0ee99f5aa127"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 27 08:16:18 crc kubenswrapper[4787]: I0127 08:16:18.310430 4787 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dmbrw\" (UniqueName: \"kubernetes.io/projected/d4fe0298-627e-4958-8321-0ee99f5aa127-kube-api-access-dmbrw\") on node \"crc\" DevicePath \"\"" Jan 27 08:16:18 crc kubenswrapper[4787]: I0127 08:16:18.310454 4787 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d4fe0298-627e-4958-8321-0ee99f5aa127-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 27 08:16:18 crc kubenswrapper[4787]: I0127 08:16:18.310466 4787 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d4fe0298-627e-4958-8321-0ee99f5aa127-utilities\") on node \"crc\" DevicePath \"\"" Jan 27 08:16:18 crc kubenswrapper[4787]: I0127 08:16:18.700826 4787 generic.go:334] "Generic (PLEG): container finished" podID="d4fe0298-627e-4958-8321-0ee99f5aa127" containerID="857641d24fd05870cf3b38ba69796ac9d253587fbf47c7a878304718b743001d" exitCode=0 Jan 27 08:16:18 crc kubenswrapper[4787]: I0127 08:16:18.700880 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-kf9sw" event={"ID":"d4fe0298-627e-4958-8321-0ee99f5aa127","Type":"ContainerDied","Data":"857641d24fd05870cf3b38ba69796ac9d253587fbf47c7a878304718b743001d"} Jan 27 08:16:18 crc kubenswrapper[4787]: I0127 08:16:18.700911 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-kf9sw" event={"ID":"d4fe0298-627e-4958-8321-0ee99f5aa127","Type":"ContainerDied","Data":"a95fa1435edc718a694995255225adb88985d99c5cdf70d182120e41920d9610"} Jan 27 08:16:18 crc kubenswrapper[4787]: I0127 08:16:18.700934 4787 scope.go:117] "RemoveContainer" containerID="857641d24fd05870cf3b38ba69796ac9d253587fbf47c7a878304718b743001d" Jan 27 08:16:18 crc kubenswrapper[4787]: I0127 08:16:18.700948 4787 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-kf9sw" Jan 27 08:16:18 crc kubenswrapper[4787]: I0127 08:16:18.735047 4787 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-kf9sw"] Jan 27 08:16:18 crc kubenswrapper[4787]: I0127 08:16:18.735257 4787 scope.go:117] "RemoveContainer" containerID="d56cb21340678a3fcbfc0a9222f8a333c974898b22f227e88d5d4124b6a67e7b" Jan 27 08:16:18 crc kubenswrapper[4787]: I0127 08:16:18.741299 4787 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-kf9sw"] Jan 27 08:16:18 crc kubenswrapper[4787]: I0127 08:16:18.757888 4787 scope.go:117] "RemoveContainer" containerID="546e611ab78f2ee068246cd0fcffa66f929314c19a038ba5bf735069a78b5d84" Jan 27 08:16:18 crc kubenswrapper[4787]: I0127 08:16:18.788255 4787 scope.go:117] "RemoveContainer" containerID="857641d24fd05870cf3b38ba69796ac9d253587fbf47c7a878304718b743001d" Jan 27 08:16:18 crc kubenswrapper[4787]: E0127 08:16:18.789919 4787 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"857641d24fd05870cf3b38ba69796ac9d253587fbf47c7a878304718b743001d\": container with ID starting with 857641d24fd05870cf3b38ba69796ac9d253587fbf47c7a878304718b743001d not found: ID does not exist" containerID="857641d24fd05870cf3b38ba69796ac9d253587fbf47c7a878304718b743001d" Jan 27 08:16:18 crc kubenswrapper[4787]: I0127 08:16:18.789983 4787 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"857641d24fd05870cf3b38ba69796ac9d253587fbf47c7a878304718b743001d"} err="failed to get container status \"857641d24fd05870cf3b38ba69796ac9d253587fbf47c7a878304718b743001d\": rpc error: code = NotFound desc = could not find container \"857641d24fd05870cf3b38ba69796ac9d253587fbf47c7a878304718b743001d\": container with ID starting with 857641d24fd05870cf3b38ba69796ac9d253587fbf47c7a878304718b743001d not found: ID does not exist" Jan 27 08:16:18 crc kubenswrapper[4787]: I0127 08:16:18.790024 4787 scope.go:117] "RemoveContainer" containerID="d56cb21340678a3fcbfc0a9222f8a333c974898b22f227e88d5d4124b6a67e7b" Jan 27 08:16:18 crc kubenswrapper[4787]: E0127 08:16:18.790468 4787 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d56cb21340678a3fcbfc0a9222f8a333c974898b22f227e88d5d4124b6a67e7b\": container with ID starting with d56cb21340678a3fcbfc0a9222f8a333c974898b22f227e88d5d4124b6a67e7b not found: ID does not exist" containerID="d56cb21340678a3fcbfc0a9222f8a333c974898b22f227e88d5d4124b6a67e7b" Jan 27 08:16:18 crc kubenswrapper[4787]: I0127 08:16:18.790492 4787 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d56cb21340678a3fcbfc0a9222f8a333c974898b22f227e88d5d4124b6a67e7b"} err="failed to get container status \"d56cb21340678a3fcbfc0a9222f8a333c974898b22f227e88d5d4124b6a67e7b\": rpc error: code = NotFound desc = could not find container \"d56cb21340678a3fcbfc0a9222f8a333c974898b22f227e88d5d4124b6a67e7b\": container with ID starting with d56cb21340678a3fcbfc0a9222f8a333c974898b22f227e88d5d4124b6a67e7b not found: ID does not exist" Jan 27 08:16:18 crc kubenswrapper[4787]: I0127 08:16:18.790507 4787 scope.go:117] "RemoveContainer" containerID="546e611ab78f2ee068246cd0fcffa66f929314c19a038ba5bf735069a78b5d84" Jan 27 08:16:18 crc kubenswrapper[4787]: E0127 08:16:18.790846 4787 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"546e611ab78f2ee068246cd0fcffa66f929314c19a038ba5bf735069a78b5d84\": container with ID starting with 546e611ab78f2ee068246cd0fcffa66f929314c19a038ba5bf735069a78b5d84 not found: ID does not exist" containerID="546e611ab78f2ee068246cd0fcffa66f929314c19a038ba5bf735069a78b5d84" Jan 27 08:16:18 crc kubenswrapper[4787]: I0127 08:16:18.790864 4787 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"546e611ab78f2ee068246cd0fcffa66f929314c19a038ba5bf735069a78b5d84"} err="failed to get container status \"546e611ab78f2ee068246cd0fcffa66f929314c19a038ba5bf735069a78b5d84\": rpc error: code = NotFound desc = could not find container \"546e611ab78f2ee068246cd0fcffa66f929314c19a038ba5bf735069a78b5d84\": container with ID starting with 546e611ab78f2ee068246cd0fcffa66f929314c19a038ba5bf735069a78b5d84 not found: ID does not exist" Jan 27 08:16:19 crc kubenswrapper[4787]: I0127 08:16:19.087708 4787 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d4fe0298-627e-4958-8321-0ee99f5aa127" path="/var/lib/kubelet/pods/d4fe0298-627e-4958-8321-0ee99f5aa127/volumes" Jan 27 08:16:22 crc kubenswrapper[4787]: I0127 08:16:22.823383 4787 patch_prober.go:28] interesting pod/machine-config-daemon-q4fh5 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 27 08:16:22 crc kubenswrapper[4787]: I0127 08:16:22.823858 4787 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-q4fh5" podUID="f051e184-acac-47cf-9e04-9df648288715" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 27 08:16:22 crc kubenswrapper[4787]: I0127 08:16:22.823925 4787 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-q4fh5" Jan 27 08:16:22 crc kubenswrapper[4787]: I0127 08:16:22.824944 4787 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"66c62556ef822841ab93c99b23fda5b0b3ea129b94ebcca6f4cff0b2d8c7f32e"} pod="openshift-machine-config-operator/machine-config-daemon-q4fh5" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 27 08:16:22 crc kubenswrapper[4787]: I0127 08:16:22.825011 4787 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-q4fh5" podUID="f051e184-acac-47cf-9e04-9df648288715" containerName="machine-config-daemon" containerID="cri-o://66c62556ef822841ab93c99b23fda5b0b3ea129b94ebcca6f4cff0b2d8c7f32e" gracePeriod=600 Jan 27 08:16:23 crc kubenswrapper[4787]: I0127 08:16:23.743328 4787 generic.go:334] "Generic (PLEG): container finished" podID="f051e184-acac-47cf-9e04-9df648288715" containerID="66c62556ef822841ab93c99b23fda5b0b3ea129b94ebcca6f4cff0b2d8c7f32e" exitCode=0 Jan 27 08:16:23 crc kubenswrapper[4787]: I0127 08:16:23.743585 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-q4fh5" 
event={"ID":"f051e184-acac-47cf-9e04-9df648288715","Type":"ContainerDied","Data":"66c62556ef822841ab93c99b23fda5b0b3ea129b94ebcca6f4cff0b2d8c7f32e"} Jan 27 08:16:23 crc kubenswrapper[4787]: I0127 08:16:23.743700 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-q4fh5" event={"ID":"f051e184-acac-47cf-9e04-9df648288715","Type":"ContainerStarted","Data":"125d3b0a12c5355f78fe53f0c5060ee333e1de0ab42bf300e460906f195efd1e"} Jan 27 08:16:23 crc kubenswrapper[4787]: I0127 08:16:23.743747 4787 scope.go:117] "RemoveContainer" containerID="62fa4f58004172098a708acf93c4ba2c2d75c5b2ad098327437e26bfa28ab448" Jan 27 08:16:35 crc kubenswrapper[4787]: I0127 08:16:35.891471 4787 scope.go:117] "RemoveContainer" containerID="50c79c9d4510d9aad33c8c6b7d7b07dfbec318be7cf6d78cc13f3f9ebe9f87c3" Jan 27 08:16:37 crc kubenswrapper[4787]: I0127 08:16:37.180418 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/nova-kuttl-default_keystone-5828-account-create-update-xh7r6_646de171-9e06-4df6-9e1e-07319c04b95c/mariadb-account-create-update/0.log" Jan 27 08:16:37 crc kubenswrapper[4787]: I0127 08:16:37.708427 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/nova-kuttl-default_keystone-678dff9ff-k6jwl_91494b32-1284-49fb-b548-3fbc5f1e1ddf/keystone-api/0.log" Jan 27 08:16:38 crc kubenswrapper[4787]: I0127 08:16:38.247682 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/nova-kuttl-default_keystone-bootstrap-bn5rd_6d885638-7008-41c1-85fc-43025469e404/keystone-bootstrap/0.log" Jan 27 08:16:38 crc kubenswrapper[4787]: I0127 08:16:38.746204 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/nova-kuttl-default_keystone-db-create-wjmqh_205e2c3c-82ef-4aaa-b1b3-0465a855bde9/mariadb-database-create/0.log" Jan 27 08:16:39 crc kubenswrapper[4787]: I0127 08:16:39.245076 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/nova-kuttl-default_keystone-db-sync-ztn8q_c5da7883-eba0-4fa4-92c5-9085b0ce0090/keystone-db-sync/0.log" Jan 27 08:16:39 crc kubenswrapper[4787]: I0127 08:16:39.943580 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/nova-kuttl-default_memcached-0_b0fc932b-0d21-426c-9b58-15c14ad9e95b/memcached/0.log" Jan 27 08:16:40 crc kubenswrapper[4787]: I0127 08:16:40.455197 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/nova-kuttl-default_openstack-cell1-galera-0_841b999e-6e42-4f28-8fd8-334f69c3e3e6/galera/0.log" Jan 27 08:16:40 crc kubenswrapper[4787]: I0127 08:16:40.987120 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/nova-kuttl-default_openstack-galera-0_144491fe-49b9-4a76-8da2-db798cf6a1e4/galera/0.log" Jan 27 08:16:41 crc kubenswrapper[4787]: I0127 08:16:41.512534 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/nova-kuttl-default_openstackclient_4826cdba-006d-447f-a206-367bd1dc8893/openstackclient/0.log" Jan 27 08:16:42 crc kubenswrapper[4787]: I0127 08:16:42.058427 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/nova-kuttl-default_placement-545ff75684-kqpgf_7d647302-7ff9-465d-b5a5-a3a76f35df28/placement-log/0.log" Jan 27 08:16:42 crc kubenswrapper[4787]: I0127 08:16:42.561312 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/nova-kuttl-default_placement-6d8d-account-create-update-5jvm9_2e3d75aa-1156-48f5-9435-b8b3cd2ec555/mariadb-account-create-update/0.log" Jan 27 08:16:43 crc kubenswrapper[4787]: I0127 08:16:43.034766 4787 log.go:25] "Finished parsing log file" 
path="/var/log/pods/nova-kuttl-default_placement-db-create-8rt5v_7ba9e1b4-f385-47f9-80f6-508a3406aa2a/mariadb-database-create/0.log" Jan 27 08:16:43 crc kubenswrapper[4787]: I0127 08:16:43.397124 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/nova-kuttl-default_placement-db-sync-7x42z_54e9c344-d7c0-4bcc-97f5-2ad0b8ee86d8/placement-db-sync/0.log" Jan 27 08:16:43 crc kubenswrapper[4787]: I0127 08:16:43.831951 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/nova-kuttl-default_rabbitmq-broadcaster-server-0_68be2504-1de8-427c-9937-bd0428f7c5c4/rabbitmq/0.log" Jan 27 08:16:44 crc kubenswrapper[4787]: I0127 08:16:44.346921 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/nova-kuttl-default_rabbitmq-cell1-server-0_faff0a15-880a-4cf7-a0e0-81d573ace274/rabbitmq/0.log" Jan 27 08:16:44 crc kubenswrapper[4787]: I0127 08:16:44.768041 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/nova-kuttl-default_rabbitmq-notifications-server-0_1d9a6f95-2510-4e74-b4f7-fb592d761c91/rabbitmq/0.log" Jan 27 08:16:45 crc kubenswrapper[4787]: I0127 08:16:45.206185 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/nova-kuttl-default_rabbitmq-server-0_f84bfff4-b4f2-40d8-8b81-a9e5eb776442/rabbitmq/0.log" Jan 27 08:16:45 crc kubenswrapper[4787]: I0127 08:16:45.595656 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/nova-kuttl-default_root-account-create-update-b4lsd_c3150913-c395-4ed0-a5d3-616426f58671/mariadb-account-create-update/0.log" Jan 27 08:17:16 crc kubenswrapper[4787]: I0127 08:17:16.415610 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_666ed4883fb88ff51f70cbd0ee285bf2a10ae3509c94c96f2548a58c3bzxmp2_46a8c934-ec5d-48f9-8c39-f4e4c730d7a5/extract/0.log" Jan 27 08:17:16 crc kubenswrapper[4787]: I0127 08:17:16.859236 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-65ff799cfd-d5r5z_4b5515cb-f539-4a9b-8c46-36c5c62c5c93/manager/0.log" Jan 27 08:17:17 crc kubenswrapper[4787]: I0127 08:17:17.305299 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-655bf9cfbb-6tnpg_85bafad5-30f3-4931-bd2a-0d45e2b0f844/manager/0.log" Jan 27 08:17:17 crc kubenswrapper[4787]: I0127 08:17:17.731699 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-77554cdc5c-627g6_dd8dbc24-8fb8-4ae2-96c3-b7112c7d7c65/manager/0.log" Jan 27 08:17:18 crc kubenswrapper[4787]: I0127 08:17:18.164082 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_fd3dfdf443b13861ec8513a19ccfc4147bdcd20f329cc4d382b7652cb7zp96g_f63e7c4f-d194-4209-a8ac-f67c9dc7dde1/extract/0.log" Jan 27 08:17:18 crc kubenswrapper[4787]: I0127 08:17:18.578583 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-67dd55ff59-kk6mj_be4be3e5-1589-4e84-9d35-f104bc3d5ad4/manager/0.log" Jan 27 08:17:18 crc kubenswrapper[4787]: I0127 08:17:18.996662 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-575ffb885b-57zhq_f621b9f3-ead9-4fab-b33f-f9d6179e8f3f/manager/0.log" Jan 27 08:17:19 crc kubenswrapper[4787]: I0127 08:17:19.405660 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-77d5c5b54f-vkmnm_a21cc0b9-75d2-4ddb-925e-23eeaac2dd35/manager/0.log" 
Jan 27 08:17:19 crc kubenswrapper[4787]: I0127 08:17:19.873602 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-7d75bc88d5-drtqw_c634491f-6069-472d-bff7-d8903d0afa1d/manager/0.log" Jan 27 08:17:20 crc kubenswrapper[4787]: I0127 08:17:20.296484 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-768b776ffb-j7klk_ff52e5a2-2aab-451a-8869-72c1a506940a/manager/0.log" Jan 27 08:17:20 crc kubenswrapper[4787]: I0127 08:17:20.743524 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-55f684fd56-2k8pd_32019c4d-61e3-4c27-86d4-3a79bb40ce70/manager/0.log" Jan 27 08:17:21 crc kubenswrapper[4787]: I0127 08:17:21.157052 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-849fcfbb6b-tbr5m_59efc0ff-5727-48f9-91b7-36533ba5f94a/manager/0.log" Jan 27 08:17:21 crc kubenswrapper[4787]: I0127 08:17:21.601600 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-6b9fb5fdcb-88gn4_e59e94f5-9033-488d-a186-476cb6cbb3f0/manager/0.log" Jan 27 08:17:22 crc kubenswrapper[4787]: I0127 08:17:22.044565 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-7ffd8d76d4-vk5vt_be114c8e-3aa0-41b4-9954-d07c800d3cfc/manager/0.log" Jan 27 08:17:22 crc kubenswrapper[4787]: I0127 08:17:22.500153 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-57d6b69d8b-j9nxt_b0a80c53-be1f-444d-a49a-05c89daad297/manager/0.log" Jan 27 08:17:22 crc kubenswrapper[4787]: I0127 08:17:22.957043 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-index-2jn9v_804d50f1-7da5-4d0c-8c0e-cac4c5e5a244/registry-server/0.log" Jan 27 08:17:23 crc kubenswrapper[4787]: I0127 08:17:23.376800 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-7875d7675-x4nz5_601fe40b-5553-4225-b5bd-214428d6fa68/manager/0.log" Jan 27 08:17:23 crc kubenswrapper[4787]: I0127 08:17:23.794225 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-6b68b8b854cf9xq_116b0532-a8d5-47ec-8e12-e7ef482094d6/manager/0.log" Jan 27 08:17:24 crc kubenswrapper[4787]: I0127 08:17:24.515265 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-manager-c595dbb59-xc2xn_7a0ecc5c-eebe-457b-9a72-804226878c7f/manager/0.log" Jan 27 08:17:24 crc kubenswrapper[4787]: I0127 08:17:24.967871 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-index-cnfx4_ba39dacd-d1da-4a2b-a4b9-fa0e917986f0/registry-server/0.log" Jan 27 08:17:25 crc kubenswrapper[4787]: I0127 08:17:25.357635 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-6f75f45d54-l9mc7_7cae616e-6aa8-405f-b10a-2a5346fae5b4/manager/0.log" Jan 27 08:17:25 crc kubenswrapper[4787]: I0127 08:17:25.801989 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-79d5ccc684-qhnz4_a552e9e7-f9fa-4b71-9ec6-848a2230279f/manager/0.log" Jan 27 08:17:26 crc kubenswrapper[4787]: I0127 
08:17:26.270515 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_rabbitmq-cluster-operator-manager-668c99d594-5lxsh_2cf3085a-51c4-42a7-a788-8e9dd9dbe4c7/operator/0.log" Jan 27 08:17:26 crc kubenswrapper[4787]: I0127 08:17:26.670543 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-547cbdb99f-pxbjw_89d7a7e7-443a-486f-b8b7-a024082b9fba/manager/0.log" Jan 27 08:17:27 crc kubenswrapper[4787]: I0127 08:17:27.111198 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-799bc87c89-cqpjk_7986c3dd-6d06-4892-9fed-1b396669685b/manager/0.log" Jan 27 08:17:27 crc kubenswrapper[4787]: I0127 08:17:27.504186 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-69797bbcbd-dnxpj_7ec92388-f799-43cc-9235-6d4b717bc98e/manager/0.log" Jan 27 08:17:27 crc kubenswrapper[4787]: I0127 08:17:27.928345 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-75db85654f-8cmb9_292c617a-b64f-4b53-b05e-360769308e43/manager/0.log" Jan 27 08:17:32 crc kubenswrapper[4787]: I0127 08:17:32.715102 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/nova-kuttl-default_keystone-5828-account-create-update-xh7r6_646de171-9e06-4df6-9e1e-07319c04b95c/mariadb-account-create-update/0.log" Jan 27 08:17:33 crc kubenswrapper[4787]: I0127 08:17:33.239030 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/nova-kuttl-default_keystone-678dff9ff-k6jwl_91494b32-1284-49fb-b548-3fbc5f1e1ddf/keystone-api/0.log" Jan 27 08:17:33 crc kubenswrapper[4787]: I0127 08:17:33.769839 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/nova-kuttl-default_keystone-bootstrap-bn5rd_6d885638-7008-41c1-85fc-43025469e404/keystone-bootstrap/0.log" Jan 27 08:17:34 crc kubenswrapper[4787]: I0127 08:17:34.295668 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/nova-kuttl-default_keystone-db-create-wjmqh_205e2c3c-82ef-4aaa-b1b3-0465a855bde9/mariadb-database-create/0.log" Jan 27 08:17:34 crc kubenswrapper[4787]: I0127 08:17:34.797457 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/nova-kuttl-default_keystone-db-sync-ztn8q_c5da7883-eba0-4fa4-92c5-9085b0ce0090/keystone-db-sync/0.log" Jan 27 08:17:35 crc kubenswrapper[4787]: I0127 08:17:35.456519 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/nova-kuttl-default_memcached-0_b0fc932b-0d21-426c-9b58-15c14ad9e95b/memcached/0.log" Jan 27 08:17:36 crc kubenswrapper[4787]: I0127 08:17:36.014852 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/nova-kuttl-default_openstack-cell1-galera-0_841b999e-6e42-4f28-8fd8-334f69c3e3e6/galera/0.log" Jan 27 08:17:36 crc kubenswrapper[4787]: I0127 08:17:36.547637 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/nova-kuttl-default_openstack-galera-0_144491fe-49b9-4a76-8da2-db798cf6a1e4/galera/0.log" Jan 27 08:17:37 crc kubenswrapper[4787]: I0127 08:17:37.073199 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/nova-kuttl-default_openstackclient_4826cdba-006d-447f-a206-367bd1dc8893/openstackclient/0.log" Jan 27 08:17:37 crc kubenswrapper[4787]: I0127 08:17:37.660253 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/nova-kuttl-default_placement-545ff75684-kqpgf_7d647302-7ff9-465d-b5a5-a3a76f35df28/placement-log/0.log" Jan 27 08:17:38 crc kubenswrapper[4787]: I0127 
08:17:38.218844 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/nova-kuttl-default_placement-6d8d-account-create-update-5jvm9_2e3d75aa-1156-48f5-9435-b8b3cd2ec555/mariadb-account-create-update/0.log" Jan 27 08:17:38 crc kubenswrapper[4787]: I0127 08:17:38.663381 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/nova-kuttl-default_placement-db-create-8rt5v_7ba9e1b4-f385-47f9-80f6-508a3406aa2a/mariadb-database-create/0.log" Jan 27 08:17:39 crc kubenswrapper[4787]: I0127 08:17:39.096838 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/nova-kuttl-default_placement-db-sync-7x42z_54e9c344-d7c0-4bcc-97f5-2ad0b8ee86d8/placement-db-sync/0.log" Jan 27 08:17:39 crc kubenswrapper[4787]: I0127 08:17:39.547275 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/nova-kuttl-default_rabbitmq-broadcaster-server-0_68be2504-1de8-427c-9937-bd0428f7c5c4/rabbitmq/0.log" Jan 27 08:17:39 crc kubenswrapper[4787]: I0127 08:17:39.987762 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/nova-kuttl-default_rabbitmq-cell1-server-0_faff0a15-880a-4cf7-a0e0-81d573ace274/rabbitmq/0.log" Jan 27 08:17:40 crc kubenswrapper[4787]: I0127 08:17:40.394888 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/nova-kuttl-default_rabbitmq-notifications-server-0_1d9a6f95-2510-4e74-b4f7-fb592d761c91/rabbitmq/0.log" Jan 27 08:17:40 crc kubenswrapper[4787]: I0127 08:17:40.852498 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/nova-kuttl-default_rabbitmq-server-0_f84bfff4-b4f2-40d8-8b81-a9e5eb776442/rabbitmq/0.log" Jan 27 08:17:41 crc kubenswrapper[4787]: I0127 08:17:41.315653 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/nova-kuttl-default_root-account-create-update-b4lsd_c3150913-c395-4ed0-a5d3-616426f58671/mariadb-account-create-update/0.log" Jan 27 08:18:12 crc kubenswrapper[4787]: I0127 08:18:12.680090 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_666ed4883fb88ff51f70cbd0ee285bf2a10ae3509c94c96f2548a58c3bzxmp2_46a8c934-ec5d-48f9-8c39-f4e4c730d7a5/extract/0.log" Jan 27 08:18:13 crc kubenswrapper[4787]: I0127 08:18:13.099649 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-65ff799cfd-d5r5z_4b5515cb-f539-4a9b-8c46-36c5c62c5c93/manager/0.log" Jan 27 08:18:13 crc kubenswrapper[4787]: I0127 08:18:13.523475 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-655bf9cfbb-6tnpg_85bafad5-30f3-4931-bd2a-0d45e2b0f844/manager/0.log" Jan 27 08:18:13 crc kubenswrapper[4787]: I0127 08:18:13.914225 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-77554cdc5c-627g6_dd8dbc24-8fb8-4ae2-96c3-b7112c7d7c65/manager/0.log" Jan 27 08:18:14 crc kubenswrapper[4787]: I0127 08:18:14.340827 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_fd3dfdf443b13861ec8513a19ccfc4147bdcd20f329cc4d382b7652cb7zp96g_f63e7c4f-d194-4209-a8ac-f67c9dc7dde1/extract/0.log" Jan 27 08:18:14 crc kubenswrapper[4787]: I0127 08:18:14.822837 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-67dd55ff59-kk6mj_be4be3e5-1589-4e84-9d35-f104bc3d5ad4/manager/0.log" Jan 27 08:18:15 crc kubenswrapper[4787]: I0127 08:18:15.214708 4787 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack-operators_heat-operator-controller-manager-575ffb885b-57zhq_f621b9f3-ead9-4fab-b33f-f9d6179e8f3f/manager/0.log" Jan 27 08:18:15 crc kubenswrapper[4787]: I0127 08:18:15.601152 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-77d5c5b54f-vkmnm_a21cc0b9-75d2-4ddb-925e-23eeaac2dd35/manager/0.log" Jan 27 08:18:16 crc kubenswrapper[4787]: I0127 08:18:16.090095 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-7d75bc88d5-drtqw_c634491f-6069-472d-bff7-d8903d0afa1d/manager/0.log" Jan 27 08:18:16 crc kubenswrapper[4787]: I0127 08:18:16.539510 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-768b776ffb-j7klk_ff52e5a2-2aab-451a-8869-72c1a506940a/manager/0.log" Jan 27 08:18:16 crc kubenswrapper[4787]: I0127 08:18:16.994374 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-55f684fd56-2k8pd_32019c4d-61e3-4c27-86d4-3a79bb40ce70/manager/0.log" Jan 27 08:18:17 crc kubenswrapper[4787]: I0127 08:18:17.380005 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-849fcfbb6b-tbr5m_59efc0ff-5727-48f9-91b7-36533ba5f94a/manager/0.log" Jan 27 08:18:17 crc kubenswrapper[4787]: I0127 08:18:17.782357 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-6b9fb5fdcb-88gn4_e59e94f5-9033-488d-a186-476cb6cbb3f0/manager/0.log" Jan 27 08:18:18 crc kubenswrapper[4787]: I0127 08:18:18.184038 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-7ffd8d76d4-vk5vt_be114c8e-3aa0-41b4-9954-d07c800d3cfc/manager/0.log" Jan 27 08:18:18 crc kubenswrapper[4787]: I0127 08:18:18.621193 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-57d6b69d8b-j9nxt_b0a80c53-be1f-444d-a49a-05c89daad297/manager/0.log" Jan 27 08:18:19 crc kubenswrapper[4787]: I0127 08:18:19.024369 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-index-2jn9v_804d50f1-7da5-4d0c-8c0e-cac4c5e5a244/registry-server/0.log" Jan 27 08:18:19 crc kubenswrapper[4787]: I0127 08:18:19.442853 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-7875d7675-x4nz5_601fe40b-5553-4225-b5bd-214428d6fa68/manager/0.log" Jan 27 08:18:19 crc kubenswrapper[4787]: I0127 08:18:19.883539 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-6b68b8b854cf9xq_116b0532-a8d5-47ec-8e12-e7ef482094d6/manager/0.log" Jan 27 08:18:20 crc kubenswrapper[4787]: I0127 08:18:20.453080 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-manager-c595dbb59-xc2xn_7a0ecc5c-eebe-457b-9a72-804226878c7f/manager/0.log" Jan 27 08:18:20 crc kubenswrapper[4787]: I0127 08:18:20.876912 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-index-cnfx4_ba39dacd-d1da-4a2b-a4b9-fa0e917986f0/registry-server/0.log" Jan 27 08:18:21 crc kubenswrapper[4787]: I0127 08:18:21.299924 4787 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-6f75f45d54-l9mc7_7cae616e-6aa8-405f-b10a-2a5346fae5b4/manager/0.log" Jan 27 08:18:21 crc kubenswrapper[4787]: I0127 08:18:21.735033 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-79d5ccc684-qhnz4_a552e9e7-f9fa-4b71-9ec6-848a2230279f/manager/0.log" Jan 27 08:18:22 crc kubenswrapper[4787]: I0127 08:18:22.127157 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_rabbitmq-cluster-operator-manager-668c99d594-5lxsh_2cf3085a-51c4-42a7-a788-8e9dd9dbe4c7/operator/0.log" Jan 27 08:18:22 crc kubenswrapper[4787]: I0127 08:18:22.526247 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-547cbdb99f-pxbjw_89d7a7e7-443a-486f-b8b7-a024082b9fba/manager/0.log" Jan 27 08:18:22 crc kubenswrapper[4787]: I0127 08:18:22.979603 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-799bc87c89-cqpjk_7986c3dd-6d06-4892-9fed-1b396669685b/manager/0.log" Jan 27 08:18:23 crc kubenswrapper[4787]: I0127 08:18:23.428194 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-69797bbcbd-dnxpj_7ec92388-f799-43cc-9235-6d4b717bc98e/manager/0.log" Jan 27 08:18:23 crc kubenswrapper[4787]: I0127 08:18:23.878443 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-75db85654f-8cmb9_292c617a-b64f-4b53-b05e-360769308e43/manager/0.log" Jan 27 08:18:46 crc kubenswrapper[4787]: I0127 08:18:46.374309 4787 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-zjjpz/must-gather-hldr6"] Jan 27 08:18:46 crc kubenswrapper[4787]: E0127 08:18:46.375233 4787 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d4fe0298-627e-4958-8321-0ee99f5aa127" containerName="extract-utilities" Jan 27 08:18:46 crc kubenswrapper[4787]: I0127 08:18:46.375247 4787 state_mem.go:107] "Deleted CPUSet assignment" podUID="d4fe0298-627e-4958-8321-0ee99f5aa127" containerName="extract-utilities" Jan 27 08:18:46 crc kubenswrapper[4787]: E0127 08:18:46.375260 4787 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d4fe0298-627e-4958-8321-0ee99f5aa127" containerName="registry-server" Jan 27 08:18:46 crc kubenswrapper[4787]: I0127 08:18:46.375266 4787 state_mem.go:107] "Deleted CPUSet assignment" podUID="d4fe0298-627e-4958-8321-0ee99f5aa127" containerName="registry-server" Jan 27 08:18:46 crc kubenswrapper[4787]: E0127 08:18:46.375283 4787 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d4fe0298-627e-4958-8321-0ee99f5aa127" containerName="extract-content" Jan 27 08:18:46 crc kubenswrapper[4787]: I0127 08:18:46.375289 4787 state_mem.go:107] "Deleted CPUSet assignment" podUID="d4fe0298-627e-4958-8321-0ee99f5aa127" containerName="extract-content" Jan 27 08:18:46 crc kubenswrapper[4787]: I0127 08:18:46.375426 4787 memory_manager.go:354] "RemoveStaleState removing state" podUID="d4fe0298-627e-4958-8321-0ee99f5aa127" containerName="registry-server" Jan 27 08:18:46 crc kubenswrapper[4787]: I0127 08:18:46.376350 4787 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-zjjpz/must-gather-hldr6" Jan 27 08:18:46 crc kubenswrapper[4787]: I0127 08:18:46.379247 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-zjjpz"/"openshift-service-ca.crt" Jan 27 08:18:46 crc kubenswrapper[4787]: I0127 08:18:46.379498 4787 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-zjjpz"/"kube-root-ca.crt" Jan 27 08:18:46 crc kubenswrapper[4787]: I0127 08:18:46.448117 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-zjjpz/must-gather-hldr6"] Jan 27 08:18:46 crc kubenswrapper[4787]: I0127 08:18:46.495872 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lrwnk\" (UniqueName: \"kubernetes.io/projected/8f8dfa07-27b1-450f-9019-56d0ced6d238-kube-api-access-lrwnk\") pod \"must-gather-hldr6\" (UID: \"8f8dfa07-27b1-450f-9019-56d0ced6d238\") " pod="openshift-must-gather-zjjpz/must-gather-hldr6" Jan 27 08:18:46 crc kubenswrapper[4787]: I0127 08:18:46.495960 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/8f8dfa07-27b1-450f-9019-56d0ced6d238-must-gather-output\") pod \"must-gather-hldr6\" (UID: \"8f8dfa07-27b1-450f-9019-56d0ced6d238\") " pod="openshift-must-gather-zjjpz/must-gather-hldr6" Jan 27 08:18:46 crc kubenswrapper[4787]: I0127 08:18:46.598471 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lrwnk\" (UniqueName: \"kubernetes.io/projected/8f8dfa07-27b1-450f-9019-56d0ced6d238-kube-api-access-lrwnk\") pod \"must-gather-hldr6\" (UID: \"8f8dfa07-27b1-450f-9019-56d0ced6d238\") " pod="openshift-must-gather-zjjpz/must-gather-hldr6" Jan 27 08:18:46 crc kubenswrapper[4787]: I0127 08:18:46.598608 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/8f8dfa07-27b1-450f-9019-56d0ced6d238-must-gather-output\") pod \"must-gather-hldr6\" (UID: \"8f8dfa07-27b1-450f-9019-56d0ced6d238\") " pod="openshift-must-gather-zjjpz/must-gather-hldr6" Jan 27 08:18:46 crc kubenswrapper[4787]: I0127 08:18:46.599161 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/8f8dfa07-27b1-450f-9019-56d0ced6d238-must-gather-output\") pod \"must-gather-hldr6\" (UID: \"8f8dfa07-27b1-450f-9019-56d0ced6d238\") " pod="openshift-must-gather-zjjpz/must-gather-hldr6" Jan 27 08:18:46 crc kubenswrapper[4787]: I0127 08:18:46.622303 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lrwnk\" (UniqueName: \"kubernetes.io/projected/8f8dfa07-27b1-450f-9019-56d0ced6d238-kube-api-access-lrwnk\") pod \"must-gather-hldr6\" (UID: \"8f8dfa07-27b1-450f-9019-56d0ced6d238\") " pod="openshift-must-gather-zjjpz/must-gather-hldr6" Jan 27 08:18:46 crc kubenswrapper[4787]: I0127 08:18:46.697075 4787 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-zjjpz/must-gather-hldr6" Jan 27 08:18:47 crc kubenswrapper[4787]: I0127 08:18:47.177787 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-zjjpz/must-gather-hldr6"] Jan 27 08:18:47 crc kubenswrapper[4787]: I0127 08:18:47.814172 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-zjjpz/must-gather-hldr6" event={"ID":"8f8dfa07-27b1-450f-9019-56d0ced6d238","Type":"ContainerStarted","Data":"85e2309321b5bceb60627051cc544310f95b011ce3da3e2ae2a9b84746e8c2f3"} Jan 27 08:18:52 crc kubenswrapper[4787]: I0127 08:18:52.823080 4787 patch_prober.go:28] interesting pod/machine-config-daemon-q4fh5 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 27 08:18:52 crc kubenswrapper[4787]: I0127 08:18:52.823979 4787 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-q4fh5" podUID="f051e184-acac-47cf-9e04-9df648288715" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 27 08:18:54 crc kubenswrapper[4787]: I0127 08:18:54.872620 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-zjjpz/must-gather-hldr6" event={"ID":"8f8dfa07-27b1-450f-9019-56d0ced6d238","Type":"ContainerStarted","Data":"387fee2205dae8bd6763255730dec63813d6d41dc01c06d500efe409bc6e4380"} Jan 27 08:18:54 crc kubenswrapper[4787]: I0127 08:18:54.873369 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-zjjpz/must-gather-hldr6" event={"ID":"8f8dfa07-27b1-450f-9019-56d0ced6d238","Type":"ContainerStarted","Data":"f30e73fea6424d4233a5355b7aa6b2d90d5724e1970661129e89efcc148da156"} Jan 27 08:18:54 crc kubenswrapper[4787]: I0127 08:18:54.886690 4787 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-zjjpz/must-gather-hldr6" podStartSLOduration=2.278290574 podStartE2EDuration="8.88667474s" podCreationTimestamp="2026-01-27 08:18:46 +0000 UTC" firstStartedPulling="2026-01-27 08:18:47.183386081 +0000 UTC m=+1632.835741573" lastFinishedPulling="2026-01-27 08:18:53.791770237 +0000 UTC m=+1639.444125739" observedRunningTime="2026-01-27 08:18:54.885023259 +0000 UTC m=+1640.537378751" watchObservedRunningTime="2026-01-27 08:18:54.88667474 +0000 UTC m=+1640.539030232" Jan 27 08:19:22 crc kubenswrapper[4787]: I0127 08:19:22.822668 4787 patch_prober.go:28] interesting pod/machine-config-daemon-q4fh5 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 27 08:19:22 crc kubenswrapper[4787]: I0127 08:19:22.823266 4787 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-q4fh5" podUID="f051e184-acac-47cf-9e04-9df648288715" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 27 08:19:37 crc kubenswrapper[4787]: I0127 08:19:37.053242 4787 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["nova-kuttl-default/root-account-create-update-b4lsd"] Jan 27 08:19:37 crc 
kubenswrapper[4787]: I0127 08:19:37.061777 4787 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["nova-kuttl-default/root-account-create-update-b4lsd"] Jan 27 08:19:37 crc kubenswrapper[4787]: I0127 08:19:37.087117 4787 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c3150913-c395-4ed0-a5d3-616426f58671" path="/var/lib/kubelet/pods/c3150913-c395-4ed0-a5d3-616426f58671/volumes" Jan 27 08:19:38 crc kubenswrapper[4787]: I0127 08:19:38.028317 4787 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["nova-kuttl-default/keystone-5828-account-create-update-xh7r6"] Jan 27 08:19:38 crc kubenswrapper[4787]: I0127 08:19:38.036700 4787 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["nova-kuttl-default/placement-db-create-8rt5v"] Jan 27 08:19:38 crc kubenswrapper[4787]: I0127 08:19:38.045226 4787 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["nova-kuttl-default/keystone-db-create-wjmqh"] Jan 27 08:19:38 crc kubenswrapper[4787]: I0127 08:19:38.053410 4787 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["nova-kuttl-default/placement-6d8d-account-create-update-5jvm9"] Jan 27 08:19:38 crc kubenswrapper[4787]: I0127 08:19:38.062200 4787 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["nova-kuttl-default/keystone-5828-account-create-update-xh7r6"] Jan 27 08:19:38 crc kubenswrapper[4787]: I0127 08:19:38.067460 4787 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["nova-kuttl-default/placement-db-create-8rt5v"] Jan 27 08:19:38 crc kubenswrapper[4787]: I0127 08:19:38.072542 4787 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["nova-kuttl-default/placement-6d8d-account-create-update-5jvm9"] Jan 27 08:19:38 crc kubenswrapper[4787]: I0127 08:19:38.079146 4787 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["nova-kuttl-default/keystone-db-create-wjmqh"] Jan 27 08:19:39 crc kubenswrapper[4787]: I0127 08:19:39.092138 4787 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="205e2c3c-82ef-4aaa-b1b3-0465a855bde9" path="/var/lib/kubelet/pods/205e2c3c-82ef-4aaa-b1b3-0465a855bde9/volumes" Jan 27 08:19:39 crc kubenswrapper[4787]: I0127 08:19:39.093785 4787 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2e3d75aa-1156-48f5-9435-b8b3cd2ec555" path="/var/lib/kubelet/pods/2e3d75aa-1156-48f5-9435-b8b3cd2ec555/volumes" Jan 27 08:19:39 crc kubenswrapper[4787]: I0127 08:19:39.094779 4787 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="646de171-9e06-4df6-9e1e-07319c04b95c" path="/var/lib/kubelet/pods/646de171-9e06-4df6-9e1e-07319c04b95c/volumes" Jan 27 08:19:39 crc kubenswrapper[4787]: I0127 08:19:39.095727 4787 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7ba9e1b4-f385-47f9-80f6-508a3406aa2a" path="/var/lib/kubelet/pods/7ba9e1b4-f385-47f9-80f6-508a3406aa2a/volumes" Jan 27 08:19:52 crc kubenswrapper[4787]: I0127 08:19:52.822664 4787 patch_prober.go:28] interesting pod/machine-config-daemon-q4fh5 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 27 08:19:52 crc kubenswrapper[4787]: I0127 08:19:52.823011 4787 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-q4fh5" podUID="f051e184-acac-47cf-9e04-9df648288715" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: 
connect: connection refused" Jan 27 08:19:52 crc kubenswrapper[4787]: I0127 08:19:52.823063 4787 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-q4fh5" Jan 27 08:19:52 crc kubenswrapper[4787]: I0127 08:19:52.823810 4787 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"125d3b0a12c5355f78fe53f0c5060ee333e1de0ab42bf300e460906f195efd1e"} pod="openshift-machine-config-operator/machine-config-daemon-q4fh5" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 27 08:19:52 crc kubenswrapper[4787]: I0127 08:19:52.823868 4787 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-q4fh5" podUID="f051e184-acac-47cf-9e04-9df648288715" containerName="machine-config-daemon" containerID="cri-o://125d3b0a12c5355f78fe53f0c5060ee333e1de0ab42bf300e460906f195efd1e" gracePeriod=600 Jan 27 08:19:52 crc kubenswrapper[4787]: E0127 08:19:52.947712 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q4fh5_openshift-machine-config-operator(f051e184-acac-47cf-9e04-9df648288715)\"" pod="openshift-machine-config-operator/machine-config-daemon-q4fh5" podUID="f051e184-acac-47cf-9e04-9df648288715" Jan 27 08:19:53 crc kubenswrapper[4787]: I0127 08:19:53.274335 4787 generic.go:334] "Generic (PLEG): container finished" podID="f051e184-acac-47cf-9e04-9df648288715" containerID="125d3b0a12c5355f78fe53f0c5060ee333e1de0ab42bf300e460906f195efd1e" exitCode=0 Jan 27 08:19:53 crc kubenswrapper[4787]: I0127 08:19:53.274388 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-q4fh5" event={"ID":"f051e184-acac-47cf-9e04-9df648288715","Type":"ContainerDied","Data":"125d3b0a12c5355f78fe53f0c5060ee333e1de0ab42bf300e460906f195efd1e"} Jan 27 08:19:53 crc kubenswrapper[4787]: I0127 08:19:53.274436 4787 scope.go:117] "RemoveContainer" containerID="66c62556ef822841ab93c99b23fda5b0b3ea129b94ebcca6f4cff0b2d8c7f32e" Jan 27 08:19:53 crc kubenswrapper[4787]: I0127 08:19:53.275038 4787 scope.go:117] "RemoveContainer" containerID="125d3b0a12c5355f78fe53f0c5060ee333e1de0ab42bf300e460906f195efd1e" Jan 27 08:19:53 crc kubenswrapper[4787]: E0127 08:19:53.275376 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q4fh5_openshift-machine-config-operator(f051e184-acac-47cf-9e04-9df648288715)\"" pod="openshift-machine-config-operator/machine-config-daemon-q4fh5" podUID="f051e184-acac-47cf-9e04-9df648288715" Jan 27 08:20:00 crc kubenswrapper[4787]: I0127 08:20:00.180126 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_666ed4883fb88ff51f70cbd0ee285bf2a10ae3509c94c96f2548a58c3bzxmp2_46a8c934-ec5d-48f9-8c39-f4e4c730d7a5/util/0.log" Jan 27 08:20:00 crc kubenswrapper[4787]: I0127 08:20:00.180130 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_666ed4883fb88ff51f70cbd0ee285bf2a10ae3509c94c96f2548a58c3bzxmp2_46a8c934-ec5d-48f9-8c39-f4e4c730d7a5/util/0.log" Jan 27 08:20:00 crc 
kubenswrapper[4787]: I0127 08:20:00.181035 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_666ed4883fb88ff51f70cbd0ee285bf2a10ae3509c94c96f2548a58c3bzxmp2_46a8c934-ec5d-48f9-8c39-f4e4c730d7a5/pull/0.log" Jan 27 08:20:00 crc kubenswrapper[4787]: I0127 08:20:00.276710 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_666ed4883fb88ff51f70cbd0ee285bf2a10ae3509c94c96f2548a58c3bzxmp2_46a8c934-ec5d-48f9-8c39-f4e4c730d7a5/pull/0.log" Jan 27 08:20:00 crc kubenswrapper[4787]: I0127 08:20:00.514562 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_666ed4883fb88ff51f70cbd0ee285bf2a10ae3509c94c96f2548a58c3bzxmp2_46a8c934-ec5d-48f9-8c39-f4e4c730d7a5/util/0.log" Jan 27 08:20:00 crc kubenswrapper[4787]: I0127 08:20:00.519129 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_666ed4883fb88ff51f70cbd0ee285bf2a10ae3509c94c96f2548a58c3bzxmp2_46a8c934-ec5d-48f9-8c39-f4e4c730d7a5/extract/0.log" Jan 27 08:20:00 crc kubenswrapper[4787]: I0127 08:20:00.578289 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_666ed4883fb88ff51f70cbd0ee285bf2a10ae3509c94c96f2548a58c3bzxmp2_46a8c934-ec5d-48f9-8c39-f4e4c730d7a5/pull/0.log" Jan 27 08:20:00 crc kubenswrapper[4787]: I0127 08:20:00.738230 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-65ff799cfd-d5r5z_4b5515cb-f539-4a9b-8c46-36c5c62c5c93/manager/0.log" Jan 27 08:20:00 crc kubenswrapper[4787]: I0127 08:20:00.775464 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-655bf9cfbb-6tnpg_85bafad5-30f3-4931-bd2a-0d45e2b0f844/manager/0.log" Jan 27 08:20:00 crc kubenswrapper[4787]: I0127 08:20:00.948261 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-77554cdc5c-627g6_dd8dbc24-8fb8-4ae2-96c3-b7112c7d7c65/manager/0.log" Jan 27 08:20:00 crc kubenswrapper[4787]: I0127 08:20:00.979720 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_fd3dfdf443b13861ec8513a19ccfc4147bdcd20f329cc4d382b7652cb7zp96g_f63e7c4f-d194-4209-a8ac-f67c9dc7dde1/util/0.log" Jan 27 08:20:01 crc kubenswrapper[4787]: I0127 08:20:01.195812 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_fd3dfdf443b13861ec8513a19ccfc4147bdcd20f329cc4d382b7652cb7zp96g_f63e7c4f-d194-4209-a8ac-f67c9dc7dde1/pull/0.log" Jan 27 08:20:01 crc kubenswrapper[4787]: I0127 08:20:01.202813 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_fd3dfdf443b13861ec8513a19ccfc4147bdcd20f329cc4d382b7652cb7zp96g_f63e7c4f-d194-4209-a8ac-f67c9dc7dde1/util/0.log" Jan 27 08:20:01 crc kubenswrapper[4787]: I0127 08:20:01.232726 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_fd3dfdf443b13861ec8513a19ccfc4147bdcd20f329cc4d382b7652cb7zp96g_f63e7c4f-d194-4209-a8ac-f67c9dc7dde1/pull/0.log" Jan 27 08:20:01 crc kubenswrapper[4787]: I0127 08:20:01.677676 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_fd3dfdf443b13861ec8513a19ccfc4147bdcd20f329cc4d382b7652cb7zp96g_f63e7c4f-d194-4209-a8ac-f67c9dc7dde1/pull/0.log" Jan 27 08:20:01 crc kubenswrapper[4787]: I0127 08:20:01.681569 4787 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack-operators_fd3dfdf443b13861ec8513a19ccfc4147bdcd20f329cc4d382b7652cb7zp96g_f63e7c4f-d194-4209-a8ac-f67c9dc7dde1/util/0.log" Jan 27 08:20:01 crc kubenswrapper[4787]: I0127 08:20:01.743802 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_fd3dfdf443b13861ec8513a19ccfc4147bdcd20f329cc4d382b7652cb7zp96g_f63e7c4f-d194-4209-a8ac-f67c9dc7dde1/extract/0.log" Jan 27 08:20:01 crc kubenswrapper[4787]: I0127 08:20:01.841841 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-67dd55ff59-kk6mj_be4be3e5-1589-4e84-9d35-f104bc3d5ad4/manager/0.log" Jan 27 08:20:01 crc kubenswrapper[4787]: I0127 08:20:01.933411 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-575ffb885b-57zhq_f621b9f3-ead9-4fab-b33f-f9d6179e8f3f/manager/0.log" Jan 27 08:20:02 crc kubenswrapper[4787]: I0127 08:20:02.072526 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-77d5c5b54f-vkmnm_a21cc0b9-75d2-4ddb-925e-23eeaac2dd35/manager/0.log" Jan 27 08:20:02 crc kubenswrapper[4787]: I0127 08:20:02.268488 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-7d75bc88d5-drtqw_c634491f-6069-472d-bff7-d8903d0afa1d/manager/0.log" Jan 27 08:20:02 crc kubenswrapper[4787]: I0127 08:20:02.405102 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-768b776ffb-j7klk_ff52e5a2-2aab-451a-8869-72c1a506940a/manager/0.log" Jan 27 08:20:02 crc kubenswrapper[4787]: I0127 08:20:02.566610 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-55f684fd56-2k8pd_32019c4d-61e3-4c27-86d4-3a79bb40ce70/manager/0.log" Jan 27 08:20:02 crc kubenswrapper[4787]: I0127 08:20:02.633989 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-849fcfbb6b-tbr5m_59efc0ff-5727-48f9-91b7-36533ba5f94a/manager/0.log" Jan 27 08:20:02 crc kubenswrapper[4787]: I0127 08:20:02.795914 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-6b9fb5fdcb-88gn4_e59e94f5-9033-488d-a186-476cb6cbb3f0/manager/0.log" Jan 27 08:20:02 crc kubenswrapper[4787]: I0127 08:20:02.909449 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-7ffd8d76d4-vk5vt_be114c8e-3aa0-41b4-9954-d07c800d3cfc/manager/0.log" Jan 27 08:20:03 crc kubenswrapper[4787]: I0127 08:20:03.045416 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-57d6b69d8b-j9nxt_b0a80c53-be1f-444d-a49a-05c89daad297/manager/0.log" Jan 27 08:20:03 crc kubenswrapper[4787]: I0127 08:20:03.170690 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-index-2jn9v_804d50f1-7da5-4d0c-8c0e-cac4c5e5a244/registry-server/0.log" Jan 27 08:20:03 crc kubenswrapper[4787]: I0127 08:20:03.302032 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-7875d7675-x4nz5_601fe40b-5553-4225-b5bd-214428d6fa68/manager/0.log" Jan 27 08:20:03 crc kubenswrapper[4787]: I0127 08:20:03.380539 4787 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-6b68b8b854cf9xq_116b0532-a8d5-47ec-8e12-e7ef482094d6/manager/0.log" Jan 27 08:20:03 crc kubenswrapper[4787]: I0127 08:20:03.645740 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-manager-c595dbb59-xc2xn_7a0ecc5c-eebe-457b-9a72-804226878c7f/manager/0.log" Jan 27 08:20:03 crc kubenswrapper[4787]: I0127 08:20:03.792764 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-index-cnfx4_ba39dacd-d1da-4a2b-a4b9-fa0e917986f0/registry-server/0.log" Jan 27 08:20:03 crc kubenswrapper[4787]: I0127 08:20:03.899842 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-6f75f45d54-l9mc7_7cae616e-6aa8-405f-b10a-2a5346fae5b4/manager/0.log" Jan 27 08:20:03 crc kubenswrapper[4787]: I0127 08:20:03.983471 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-79d5ccc684-qhnz4_a552e9e7-f9fa-4b71-9ec6-848a2230279f/manager/0.log" Jan 27 08:20:04 crc kubenswrapper[4787]: I0127 08:20:04.084682 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_rabbitmq-cluster-operator-manager-668c99d594-5lxsh_2cf3085a-51c4-42a7-a788-8e9dd9dbe4c7/operator/0.log" Jan 27 08:20:04 crc kubenswrapper[4787]: I0127 08:20:04.205175 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-547cbdb99f-pxbjw_89d7a7e7-443a-486f-b8b7-a024082b9fba/manager/0.log" Jan 27 08:20:04 crc kubenswrapper[4787]: I0127 08:20:04.309876 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-799bc87c89-cqpjk_7986c3dd-6d06-4892-9fed-1b396669685b/manager/0.log" Jan 27 08:20:04 crc kubenswrapper[4787]: I0127 08:20:04.397288 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-69797bbcbd-dnxpj_7ec92388-f799-43cc-9235-6d4b717bc98e/manager/0.log" Jan 27 08:20:04 crc kubenswrapper[4787]: I0127 08:20:04.582809 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-75db85654f-8cmb9_292c617a-b64f-4b53-b05e-360769308e43/manager/0.log" Jan 27 08:20:08 crc kubenswrapper[4787]: I0127 08:20:08.076294 4787 scope.go:117] "RemoveContainer" containerID="125d3b0a12c5355f78fe53f0c5060ee333e1de0ab42bf300e460906f195efd1e" Jan 27 08:20:08 crc kubenswrapper[4787]: E0127 08:20:08.078005 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q4fh5_openshift-machine-config-operator(f051e184-acac-47cf-9e04-9df648288715)\"" pod="openshift-machine-config-operator/machine-config-daemon-q4fh5" podUID="f051e184-acac-47cf-9e04-9df648288715" Jan 27 08:20:17 crc kubenswrapper[4787]: I0127 08:20:17.041432 4787 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["nova-kuttl-default/keystone-db-sync-ztn8q"] Jan 27 08:20:17 crc kubenswrapper[4787]: I0127 08:20:17.047380 4787 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["nova-kuttl-default/keystone-db-sync-ztn8q"] Jan 27 08:20:17 crc kubenswrapper[4787]: I0127 08:20:17.085204 4787 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" 
podUID="c5da7883-eba0-4fa4-92c5-9085b0ce0090" path="/var/lib/kubelet/pods/c5da7883-eba0-4fa4-92c5-9085b0ce0090/volumes" Jan 27 08:20:19 crc kubenswrapper[4787]: I0127 08:20:19.076584 4787 scope.go:117] "RemoveContainer" containerID="125d3b0a12c5355f78fe53f0c5060ee333e1de0ab42bf300e460906f195efd1e" Jan 27 08:20:19 crc kubenswrapper[4787]: E0127 08:20:19.077166 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q4fh5_openshift-machine-config-operator(f051e184-acac-47cf-9e04-9df648288715)\"" pod="openshift-machine-config-operator/machine-config-daemon-q4fh5" podUID="f051e184-acac-47cf-9e04-9df648288715" Jan 27 08:20:25 crc kubenswrapper[4787]: I0127 08:20:25.542254 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_control-plane-machine-set-operator-78cbb6b69f-wdvfw_c71976d9-f7ec-4258-8bf5-08a526607fd9/control-plane-machine-set-operator/0.log" Jan 27 08:20:25 crc kubenswrapper[4787]: I0127 08:20:25.709464 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-qtkc8_3ef9e285-88a8-499d-8fb8-e4c882336e68/kube-rbac-proxy/0.log" Jan 27 08:20:25 crc kubenswrapper[4787]: I0127 08:20:25.725370 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-qtkc8_3ef9e285-88a8-499d-8fb8-e4c882336e68/machine-api-operator/0.log" Jan 27 08:20:26 crc kubenswrapper[4787]: I0127 08:20:26.025238 4787 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["nova-kuttl-default/placement-db-sync-7x42z"] Jan 27 08:20:26 crc kubenswrapper[4787]: I0127 08:20:26.035221 4787 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["nova-kuttl-default/placement-db-sync-7x42z"] Jan 27 08:20:27 crc kubenswrapper[4787]: I0127 08:20:27.085222 4787 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="54e9c344-d7c0-4bcc-97f5-2ad0b8ee86d8" path="/var/lib/kubelet/pods/54e9c344-d7c0-4bcc-97f5-2ad0b8ee86d8/volumes" Jan 27 08:20:32 crc kubenswrapper[4787]: I0127 08:20:32.031455 4787 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["nova-kuttl-default/keystone-bootstrap-bn5rd"] Jan 27 08:20:32 crc kubenswrapper[4787]: I0127 08:20:32.037242 4787 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["nova-kuttl-default/keystone-bootstrap-bn5rd"] Jan 27 08:20:33 crc kubenswrapper[4787]: I0127 08:20:33.092244 4787 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6d885638-7008-41c1-85fc-43025469e404" path="/var/lib/kubelet/pods/6d885638-7008-41c1-85fc-43025469e404/volumes" Jan 27 08:20:34 crc kubenswrapper[4787]: I0127 08:20:34.076942 4787 scope.go:117] "RemoveContainer" containerID="125d3b0a12c5355f78fe53f0c5060ee333e1de0ab42bf300e460906f195efd1e" Jan 27 08:20:34 crc kubenswrapper[4787]: E0127 08:20:34.077308 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q4fh5_openshift-machine-config-operator(f051e184-acac-47cf-9e04-9df648288715)\"" pod="openshift-machine-config-operator/machine-config-daemon-q4fh5" podUID="f051e184-acac-47cf-9e04-9df648288715" Jan 27 08:20:36 crc kubenswrapper[4787]: I0127 08:20:36.024217 4787 scope.go:117] "RemoveContainer" 
containerID="3f06a186b769d4a1f5e238fd19184a35781ff17d23f2779b2c34abe34bf41b44" Jan 27 08:20:36 crc kubenswrapper[4787]: I0127 08:20:36.045685 4787 scope.go:117] "RemoveContainer" containerID="b5b33f88492ce6991e1e36a7024a76b4049def405dfe4dc15a1159b39d6f6633" Jan 27 08:20:36 crc kubenswrapper[4787]: I0127 08:20:36.088179 4787 scope.go:117] "RemoveContainer" containerID="9ab563345b762a72fbe3b47df8d23be9db6758acc4bcc324921200a4b4455e30" Jan 27 08:20:36 crc kubenswrapper[4787]: I0127 08:20:36.108118 4787 scope.go:117] "RemoveContainer" containerID="a478f74f2f9db06c59e7ba51e2be735e9b9fb9d652017f85e3a9daea1ac8536e" Jan 27 08:20:36 crc kubenswrapper[4787]: I0127 08:20:36.137586 4787 scope.go:117] "RemoveContainer" containerID="2e3468c57e9613d84bee598f4f0c5a4b7e44d892a408fe4d11cddeffe9d5b60b" Jan 27 08:20:36 crc kubenswrapper[4787]: I0127 08:20:36.169182 4787 scope.go:117] "RemoveContainer" containerID="dcf70ae1779bf3b7d74781efddb0114399b6ad2886db99535b6b73b54d7d6a8a" Jan 27 08:20:36 crc kubenswrapper[4787]: I0127 08:20:36.211193 4787 scope.go:117] "RemoveContainer" containerID="144343248d2b16217cf2dcb9392f4fccabd0a94e089b16a6bacae928d2fbb414" Jan 27 08:20:36 crc kubenswrapper[4787]: I0127 08:20:36.238771 4787 scope.go:117] "RemoveContainer" containerID="fdbb6282b854859c8ab712b9eae897a43f0552adf90b843b092e2da77b58ae00" Jan 27 08:20:38 crc kubenswrapper[4787]: I0127 08:20:38.678407 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-86cb77c54b-vbzqb_2219abac-7659-407a-b36f-cd0160e70c25/cert-manager-controller/0.log" Jan 27 08:20:38 crc kubenswrapper[4787]: I0127 08:20:38.902063 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-cainjector-855d9ccff4-k4x6h_56feb43c-1e68-4770-94c0-e22943ca4313/cert-manager-cainjector/0.log" Jan 27 08:20:38 crc kubenswrapper[4787]: I0127 08:20:38.922183 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-webhook-f4fb5df64-lfhld_aff129cd-50c9-4f2a-b70d-d2066197a73b/cert-manager-webhook/0.log" Jan 27 08:20:49 crc kubenswrapper[4787]: I0127 08:20:49.078896 4787 scope.go:117] "RemoveContainer" containerID="125d3b0a12c5355f78fe53f0c5060ee333e1de0ab42bf300e460906f195efd1e" Jan 27 08:20:49 crc kubenswrapper[4787]: E0127 08:20:49.082642 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q4fh5_openshift-machine-config-operator(f051e184-acac-47cf-9e04-9df648288715)\"" pod="openshift-machine-config-operator/machine-config-daemon-q4fh5" podUID="f051e184-acac-47cf-9e04-9df648288715" Jan 27 08:20:52 crc kubenswrapper[4787]: I0127 08:20:52.192302 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-console-plugin-7754f76f8b-7qxzb_51c3dc19-29a5-45ab-a384-fb7be3aa3f53/nmstate-console-plugin/0.log" Jan 27 08:20:52 crc kubenswrapper[4787]: I0127 08:20:52.334623 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-handler-fjcwr_0131f9ae-88a6-41c3-8c76-6b2676d5e87b/nmstate-handler/0.log" Jan 27 08:20:52 crc kubenswrapper[4787]: I0127 08:20:52.388246 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-54757c584b-b8zkq_86b956d1-8553-4624-8324-f0a65d627e10/kube-rbac-proxy/0.log" Jan 27 08:20:52 crc kubenswrapper[4787]: I0127 08:20:52.569157 4787 log.go:25] "Finished parsing log 
file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-54757c584b-b8zkq_86b956d1-8553-4624-8324-f0a65d627e10/nmstate-metrics/0.log" Jan 27 08:20:52 crc kubenswrapper[4787]: I0127 08:20:52.617069 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-operator-646758c888-zxwfx_d9fe5568-1516-4ee4-8ca1-90f4ea58ba3e/nmstate-operator/0.log" Jan 27 08:20:52 crc kubenswrapper[4787]: I0127 08:20:52.744199 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-webhook-8474b5b9d8-n6mzk_29bc137f-2121-451f-9684-2b4209b38c32/nmstate-webhook/0.log" Jan 27 08:21:03 crc kubenswrapper[4787]: I0127 08:21:03.076465 4787 scope.go:117] "RemoveContainer" containerID="125d3b0a12c5355f78fe53f0c5060ee333e1de0ab42bf300e460906f195efd1e" Jan 27 08:21:03 crc kubenswrapper[4787]: E0127 08:21:03.077301 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q4fh5_openshift-machine-config-operator(f051e184-acac-47cf-9e04-9df648288715)\"" pod="openshift-machine-config-operator/machine-config-daemon-q4fh5" podUID="f051e184-acac-47cf-9e04-9df648288715" Jan 27 08:21:17 crc kubenswrapper[4787]: I0127 08:21:17.077540 4787 scope.go:117] "RemoveContainer" containerID="125d3b0a12c5355f78fe53f0c5060ee333e1de0ab42bf300e460906f195efd1e" Jan 27 08:21:17 crc kubenswrapper[4787]: E0127 08:21:17.078664 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q4fh5_openshift-machine-config-operator(f051e184-acac-47cf-9e04-9df648288715)\"" pod="openshift-machine-config-operator/machine-config-daemon-q4fh5" podUID="f051e184-acac-47cf-9e04-9df648288715" Jan 27 08:21:21 crc kubenswrapper[4787]: I0127 08:21:21.714733 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-6968d8fdc4-k24s7_f9b9a161-da5c-416b-9713-5fb85ee005fb/kube-rbac-proxy/0.log" Jan 27 08:21:21 crc kubenswrapper[4787]: I0127 08:21:21.832506 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-6968d8fdc4-k24s7_f9b9a161-da5c-416b-9713-5fb85ee005fb/controller/0.log" Jan 27 08:21:21 crc kubenswrapper[4787]: I0127 08:21:21.962668 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-pp6fl_49a597ca-3bfc-4377-a49b-19337d609659/cp-frr-files/0.log" Jan 27 08:21:22 crc kubenswrapper[4787]: I0127 08:21:22.131718 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-pp6fl_49a597ca-3bfc-4377-a49b-19337d609659/cp-frr-files/0.log" Jan 27 08:21:22 crc kubenswrapper[4787]: I0127 08:21:22.132100 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-pp6fl_49a597ca-3bfc-4377-a49b-19337d609659/cp-reloader/0.log" Jan 27 08:21:22 crc kubenswrapper[4787]: I0127 08:21:22.132130 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-pp6fl_49a597ca-3bfc-4377-a49b-19337d609659/cp-reloader/0.log" Jan 27 08:21:22 crc kubenswrapper[4787]: I0127 08:21:22.181040 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-pp6fl_49a597ca-3bfc-4377-a49b-19337d609659/cp-metrics/0.log" Jan 27 08:21:22 crc kubenswrapper[4787]: I0127 08:21:22.365469 4787 log.go:25] 
"Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-pp6fl_49a597ca-3bfc-4377-a49b-19337d609659/cp-reloader/0.log" Jan 27 08:21:22 crc kubenswrapper[4787]: I0127 08:21:22.372053 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-pp6fl_49a597ca-3bfc-4377-a49b-19337d609659/cp-metrics/0.log" Jan 27 08:21:22 crc kubenswrapper[4787]: I0127 08:21:22.398432 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-pp6fl_49a597ca-3bfc-4377-a49b-19337d609659/cp-frr-files/0.log" Jan 27 08:21:22 crc kubenswrapper[4787]: I0127 08:21:22.408153 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-pp6fl_49a597ca-3bfc-4377-a49b-19337d609659/cp-metrics/0.log" Jan 27 08:21:22 crc kubenswrapper[4787]: I0127 08:21:22.526355 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-pp6fl_49a597ca-3bfc-4377-a49b-19337d609659/cp-frr-files/0.log" Jan 27 08:21:22 crc kubenswrapper[4787]: I0127 08:21:22.552582 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-pp6fl_49a597ca-3bfc-4377-a49b-19337d609659/cp-reloader/0.log" Jan 27 08:21:22 crc kubenswrapper[4787]: I0127 08:21:22.571303 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-pp6fl_49a597ca-3bfc-4377-a49b-19337d609659/cp-metrics/0.log" Jan 27 08:21:22 crc kubenswrapper[4787]: I0127 08:21:22.608380 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-pp6fl_49a597ca-3bfc-4377-a49b-19337d609659/controller/0.log" Jan 27 08:21:22 crc kubenswrapper[4787]: I0127 08:21:22.754058 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-pp6fl_49a597ca-3bfc-4377-a49b-19337d609659/frr-metrics/0.log" Jan 27 08:21:22 crc kubenswrapper[4787]: I0127 08:21:22.793782 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-pp6fl_49a597ca-3bfc-4377-a49b-19337d609659/kube-rbac-proxy/0.log" Jan 27 08:21:22 crc kubenswrapper[4787]: I0127 08:21:22.829206 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-pp6fl_49a597ca-3bfc-4377-a49b-19337d609659/kube-rbac-proxy-frr/0.log" Jan 27 08:21:22 crc kubenswrapper[4787]: I0127 08:21:22.961875 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-pp6fl_49a597ca-3bfc-4377-a49b-19337d609659/reloader/0.log" Jan 27 08:21:23 crc kubenswrapper[4787]: I0127 08:21:23.005140 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-pp6fl_49a597ca-3bfc-4377-a49b-19337d609659/frr/0.log" Jan 27 08:21:23 crc kubenswrapper[4787]: I0127 08:21:23.045169 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-webhook-server-7df86c4f6c-vfl8q_360229c9-082c-4e85-8800-c5f8717fd8c4/frr-k8s-webhook-server/0.log" Jan 27 08:21:23 crc kubenswrapper[4787]: I0127 08:21:23.170023 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-controller-manager-555548cdf7-hwkjk_6d5bf055-d90c-451f-aaf7-19140fcea291/manager/0.log" Jan 27 08:21:23 crc kubenswrapper[4787]: I0127 08:21:23.261167 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-webhook-server-78544bb5fb-sxw5f_a5c49848-4aac-4efa-8acf-7edcee5c2093/webhook-server/0.log" Jan 27 08:21:23 crc kubenswrapper[4787]: I0127 08:21:23.375814 4787 log.go:25] "Finished parsing log file" 
path="/var/log/pods/metallb-system_speaker-t2fkx_f5e61cdf-e660-42e7-b43c-42afb781223b/kube-rbac-proxy/0.log" Jan 27 08:21:23 crc kubenswrapper[4787]: I0127 08:21:23.566074 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-t2fkx_f5e61cdf-e660-42e7-b43c-42afb781223b/speaker/0.log" Jan 27 08:21:32 crc kubenswrapper[4787]: I0127 08:21:32.076767 4787 scope.go:117] "RemoveContainer" containerID="125d3b0a12c5355f78fe53f0c5060ee333e1de0ab42bf300e460906f195efd1e" Jan 27 08:21:32 crc kubenswrapper[4787]: E0127 08:21:32.077690 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q4fh5_openshift-machine-config-operator(f051e184-acac-47cf-9e04-9df648288715)\"" pod="openshift-machine-config-operator/machine-config-daemon-q4fh5" podUID="f051e184-acac-47cf-9e04-9df648288715" Jan 27 08:21:38 crc kubenswrapper[4787]: I0127 08:21:38.947437 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/nova-kuttl-default_keystone-678dff9ff-k6jwl_91494b32-1284-49fb-b548-3fbc5f1e1ddf/keystone-api/0.log" Jan 27 08:21:39 crc kubenswrapper[4787]: I0127 08:21:39.238614 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/nova-kuttl-default_openstack-cell1-galera-0_841b999e-6e42-4f28-8fd8-334f69c3e3e6/mysql-bootstrap/0.log" Jan 27 08:21:39 crc kubenswrapper[4787]: I0127 08:21:39.257637 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/nova-kuttl-default_memcached-0_b0fc932b-0d21-426c-9b58-15c14ad9e95b/memcached/0.log" Jan 27 08:21:39 crc kubenswrapper[4787]: I0127 08:21:39.400050 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/nova-kuttl-default_openstack-cell1-galera-0_841b999e-6e42-4f28-8fd8-334f69c3e3e6/mysql-bootstrap/0.log" Jan 27 08:21:39 crc kubenswrapper[4787]: I0127 08:21:39.447189 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/nova-kuttl-default_openstack-cell1-galera-0_841b999e-6e42-4f28-8fd8-334f69c3e3e6/galera/0.log" Jan 27 08:21:39 crc kubenswrapper[4787]: I0127 08:21:39.510665 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/nova-kuttl-default_openstack-galera-0_144491fe-49b9-4a76-8da2-db798cf6a1e4/mysql-bootstrap/0.log" Jan 27 08:21:39 crc kubenswrapper[4787]: I0127 08:21:39.677619 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/nova-kuttl-default_openstack-galera-0_144491fe-49b9-4a76-8da2-db798cf6a1e4/mysql-bootstrap/0.log" Jan 27 08:21:39 crc kubenswrapper[4787]: I0127 08:21:39.689098 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/nova-kuttl-default_openstack-galera-0_144491fe-49b9-4a76-8da2-db798cf6a1e4/galera/0.log" Jan 27 08:21:39 crc kubenswrapper[4787]: I0127 08:21:39.721698 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/nova-kuttl-default_openstackclient_4826cdba-006d-447f-a206-367bd1dc8893/openstackclient/0.log" Jan 27 08:21:39 crc kubenswrapper[4787]: I0127 08:21:39.901485 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/nova-kuttl-default_placement-545ff75684-kqpgf_7d647302-7ff9-465d-b5a5-a3a76f35df28/placement-api/0.log" Jan 27 08:21:39 crc kubenswrapper[4787]: I0127 08:21:39.989099 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/nova-kuttl-default_placement-545ff75684-kqpgf_7d647302-7ff9-465d-b5a5-a3a76f35df28/placement-log/0.log" Jan 27 08:21:40 crc kubenswrapper[4787]: I0127 08:21:40.094587 4787 log.go:25] "Finished 
parsing log file" path="/var/log/pods/nova-kuttl-default_rabbitmq-broadcaster-server-0_68be2504-1de8-427c-9937-bd0428f7c5c4/setup-container/0.log" Jan 27 08:21:40 crc kubenswrapper[4787]: I0127 08:21:40.276800 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/nova-kuttl-default_rabbitmq-broadcaster-server-0_68be2504-1de8-427c-9937-bd0428f7c5c4/setup-container/0.log" Jan 27 08:21:40 crc kubenswrapper[4787]: I0127 08:21:40.332062 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/nova-kuttl-default_rabbitmq-broadcaster-server-0_68be2504-1de8-427c-9937-bd0428f7c5c4/rabbitmq/0.log" Jan 27 08:21:40 crc kubenswrapper[4787]: I0127 08:21:40.380988 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/nova-kuttl-default_rabbitmq-cell1-server-0_faff0a15-880a-4cf7-a0e0-81d573ace274/setup-container/0.log" Jan 27 08:21:40 crc kubenswrapper[4787]: I0127 08:21:40.535455 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/nova-kuttl-default_rabbitmq-cell1-server-0_faff0a15-880a-4cf7-a0e0-81d573ace274/rabbitmq/0.log" Jan 27 08:21:40 crc kubenswrapper[4787]: I0127 08:21:40.564363 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/nova-kuttl-default_rabbitmq-cell1-server-0_faff0a15-880a-4cf7-a0e0-81d573ace274/setup-container/0.log" Jan 27 08:21:40 crc kubenswrapper[4787]: I0127 08:21:40.617740 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/nova-kuttl-default_rabbitmq-notifications-server-0_1d9a6f95-2510-4e74-b4f7-fb592d761c91/setup-container/0.log" Jan 27 08:21:40 crc kubenswrapper[4787]: I0127 08:21:40.787169 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/nova-kuttl-default_rabbitmq-notifications-server-0_1d9a6f95-2510-4e74-b4f7-fb592d761c91/setup-container/0.log" Jan 27 08:21:40 crc kubenswrapper[4787]: I0127 08:21:40.834377 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/nova-kuttl-default_rabbitmq-notifications-server-0_1d9a6f95-2510-4e74-b4f7-fb592d761c91/rabbitmq/0.log" Jan 27 08:21:40 crc kubenswrapper[4787]: I0127 08:21:40.834732 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/nova-kuttl-default_rabbitmq-server-0_f84bfff4-b4f2-40d8-8b81-a9e5eb776442/setup-container/0.log" Jan 27 08:21:41 crc kubenswrapper[4787]: I0127 08:21:41.073664 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/nova-kuttl-default_rabbitmq-server-0_f84bfff4-b4f2-40d8-8b81-a9e5eb776442/setup-container/0.log" Jan 27 08:21:41 crc kubenswrapper[4787]: I0127 08:21:41.076580 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/nova-kuttl-default_rabbitmq-server-0_f84bfff4-b4f2-40d8-8b81-a9e5eb776442/rabbitmq/0.log" Jan 27 08:21:47 crc kubenswrapper[4787]: I0127 08:21:47.076783 4787 scope.go:117] "RemoveContainer" containerID="125d3b0a12c5355f78fe53f0c5060ee333e1de0ab42bf300e460906f195efd1e" Jan 27 08:21:47 crc kubenswrapper[4787]: E0127 08:21:47.077348 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q4fh5_openshift-machine-config-operator(f051e184-acac-47cf-9e04-9df648288715)\"" pod="openshift-machine-config-operator/machine-config-daemon-q4fh5" podUID="f051e184-acac-47cf-9e04-9df648288715" Jan 27 08:21:56 crc kubenswrapper[4787]: I0127 08:21:56.187348 4787 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931av6xrq_9ba6b24a-de23-4528-87fc-c2932e3beb6e/util/0.log" Jan 27 08:21:56 crc kubenswrapper[4787]: I0127 08:21:56.448261 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931av6xrq_9ba6b24a-de23-4528-87fc-c2932e3beb6e/util/0.log" Jan 27 08:21:56 crc kubenswrapper[4787]: I0127 08:21:56.489358 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931av6xrq_9ba6b24a-de23-4528-87fc-c2932e3beb6e/pull/0.log" Jan 27 08:21:56 crc kubenswrapper[4787]: I0127 08:21:56.574273 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931av6xrq_9ba6b24a-de23-4528-87fc-c2932e3beb6e/pull/0.log" Jan 27 08:21:56 crc kubenswrapper[4787]: I0127 08:21:56.664168 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931av6xrq_9ba6b24a-de23-4528-87fc-c2932e3beb6e/util/0.log" Jan 27 08:21:56 crc kubenswrapper[4787]: I0127 08:21:56.738902 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931av6xrq_9ba6b24a-de23-4528-87fc-c2932e3beb6e/extract/0.log" Jan 27 08:21:56 crc kubenswrapper[4787]: I0127 08:21:56.779744 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931av6xrq_9ba6b24a-de23-4528-87fc-c2932e3beb6e/pull/0.log" Jan 27 08:21:56 crc kubenswrapper[4787]: I0127 08:21:56.858832 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc44q77_db32e829-ff6c-4e31-bbc4-5291eb8127d3/util/0.log" Jan 27 08:21:57 crc kubenswrapper[4787]: I0127 08:21:57.079370 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc44q77_db32e829-ff6c-4e31-bbc4-5291eb8127d3/pull/0.log" Jan 27 08:21:57 crc kubenswrapper[4787]: I0127 08:21:57.095200 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc44q77_db32e829-ff6c-4e31-bbc4-5291eb8127d3/pull/0.log" Jan 27 08:21:57 crc kubenswrapper[4787]: I0127 08:21:57.103122 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc44q77_db32e829-ff6c-4e31-bbc4-5291eb8127d3/util/0.log" Jan 27 08:21:57 crc kubenswrapper[4787]: I0127 08:21:57.290941 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc44q77_db32e829-ff6c-4e31-bbc4-5291eb8127d3/pull/0.log" Jan 27 08:21:57 crc kubenswrapper[4787]: I0127 08:21:57.315988 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc44q77_db32e829-ff6c-4e31-bbc4-5291eb8127d3/util/0.log" Jan 27 08:21:57 crc kubenswrapper[4787]: I0127 08:21:57.384935 4787 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc44q77_db32e829-ff6c-4e31-bbc4-5291eb8127d3/extract/0.log" Jan 27 08:21:57 crc kubenswrapper[4787]: I0127 08:21:57.482253 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec71368s4v_b105de51-099b-4b81-b99d-6aa63d8821ae/util/0.log" Jan 27 08:21:57 crc kubenswrapper[4787]: I0127 08:21:57.643651 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec71368s4v_b105de51-099b-4b81-b99d-6aa63d8821ae/util/0.log" Jan 27 08:21:57 crc kubenswrapper[4787]: I0127 08:21:57.674791 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec71368s4v_b105de51-099b-4b81-b99d-6aa63d8821ae/pull/0.log" Jan 27 08:21:57 crc kubenswrapper[4787]: I0127 08:21:57.675629 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec71368s4v_b105de51-099b-4b81-b99d-6aa63d8821ae/pull/0.log" Jan 27 08:21:57 crc kubenswrapper[4787]: I0127 08:21:57.881904 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec71368s4v_b105de51-099b-4b81-b99d-6aa63d8821ae/util/0.log" Jan 27 08:21:57 crc kubenswrapper[4787]: I0127 08:21:57.882681 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec71368s4v_b105de51-099b-4b81-b99d-6aa63d8821ae/extract/0.log" Jan 27 08:21:57 crc kubenswrapper[4787]: I0127 08:21:57.919381 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec71368s4v_b105de51-099b-4b81-b99d-6aa63d8821ae/pull/0.log" Jan 27 08:21:58 crc kubenswrapper[4787]: I0127 08:21:58.092103 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-bjssk_56eb4b72-a8dc-4300-882e-e29d83442af5/extract-utilities/0.log" Jan 27 08:21:58 crc kubenswrapper[4787]: I0127 08:21:58.292440 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-bjssk_56eb4b72-a8dc-4300-882e-e29d83442af5/extract-utilities/0.log" Jan 27 08:21:58 crc kubenswrapper[4787]: I0127 08:21:58.354355 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-bjssk_56eb4b72-a8dc-4300-882e-e29d83442af5/extract-content/0.log" Jan 27 08:21:58 crc kubenswrapper[4787]: I0127 08:21:58.381894 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-bjssk_56eb4b72-a8dc-4300-882e-e29d83442af5/extract-content/0.log" Jan 27 08:21:58 crc kubenswrapper[4787]: I0127 08:21:58.472903 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-bjssk_56eb4b72-a8dc-4300-882e-e29d83442af5/extract-utilities/0.log" Jan 27 08:21:58 crc kubenswrapper[4787]: I0127 08:21:58.604031 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-bjssk_56eb4b72-a8dc-4300-882e-e29d83442af5/extract-content/0.log" Jan 27 08:21:58 crc kubenswrapper[4787]: I0127 08:21:58.803727 4787 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_community-operators-gjx52_6953906d-8a35-4d3e-83a3-3a7451e834cc/extract-utilities/0.log" Jan 27 08:21:58 crc kubenswrapper[4787]: I0127 08:21:58.875449 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-bjssk_56eb4b72-a8dc-4300-882e-e29d83442af5/registry-server/0.log" Jan 27 08:21:59 crc kubenswrapper[4787]: I0127 08:21:59.034219 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-gjx52_6953906d-8a35-4d3e-83a3-3a7451e834cc/extract-content/0.log" Jan 27 08:21:59 crc kubenswrapper[4787]: I0127 08:21:59.052364 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-gjx52_6953906d-8a35-4d3e-83a3-3a7451e834cc/extract-content/0.log" Jan 27 08:21:59 crc kubenswrapper[4787]: I0127 08:21:59.070272 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-gjx52_6953906d-8a35-4d3e-83a3-3a7451e834cc/extract-utilities/0.log" Jan 27 08:21:59 crc kubenswrapper[4787]: I0127 08:21:59.080205 4787 scope.go:117] "RemoveContainer" containerID="125d3b0a12c5355f78fe53f0c5060ee333e1de0ab42bf300e460906f195efd1e" Jan 27 08:21:59 crc kubenswrapper[4787]: E0127 08:21:59.080426 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q4fh5_openshift-machine-config-operator(f051e184-acac-47cf-9e04-9df648288715)\"" pod="openshift-machine-config-operator/machine-config-daemon-q4fh5" podUID="f051e184-acac-47cf-9e04-9df648288715" Jan 27 08:21:59 crc kubenswrapper[4787]: I0127 08:21:59.414143 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-gjx52_6953906d-8a35-4d3e-83a3-3a7451e834cc/extract-content/0.log" Jan 27 08:21:59 crc kubenswrapper[4787]: I0127 08:21:59.496729 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-gjx52_6953906d-8a35-4d3e-83a3-3a7451e834cc/extract-utilities/0.log" Jan 27 08:21:59 crc kubenswrapper[4787]: I0127 08:21:59.679230 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-fkd29_39675fef-cac4-48c9-bd77-e0ee695a5ab8/marketplace-operator/0.log" Jan 27 08:21:59 crc kubenswrapper[4787]: I0127 08:21:59.716123 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-gjx52_6953906d-8a35-4d3e-83a3-3a7451e834cc/registry-server/0.log" Jan 27 08:21:59 crc kubenswrapper[4787]: I0127 08:21:59.842420 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-rdpsb_b6daca56-9e67-41d1-80da-c213717daead/extract-utilities/0.log" Jan 27 08:21:59 crc kubenswrapper[4787]: I0127 08:21:59.933483 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-rdpsb_b6daca56-9e67-41d1-80da-c213717daead/extract-utilities/0.log" Jan 27 08:21:59 crc kubenswrapper[4787]: I0127 08:21:59.998569 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-rdpsb_b6daca56-9e67-41d1-80da-c213717daead/extract-content/0.log" Jan 27 08:22:00 crc kubenswrapper[4787]: I0127 08:22:00.139009 4787 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_redhat-marketplace-rdpsb_b6daca56-9e67-41d1-80da-c213717daead/extract-content/0.log" Jan 27 08:22:00 crc kubenswrapper[4787]: I0127 08:22:00.275948 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-rdpsb_b6daca56-9e67-41d1-80da-c213717daead/extract-content/0.log" Jan 27 08:22:00 crc kubenswrapper[4787]: I0127 08:22:00.324474 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-rdpsb_b6daca56-9e67-41d1-80da-c213717daead/extract-utilities/0.log" Jan 27 08:22:00 crc kubenswrapper[4787]: I0127 08:22:00.400733 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-rdpsb_b6daca56-9e67-41d1-80da-c213717daead/registry-server/0.log" Jan 27 08:22:00 crc kubenswrapper[4787]: I0127 08:22:00.413347 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-8bz6z_3c2c8464-ea72-46ca-a23d-ae23e6617ce8/extract-utilities/0.log" Jan 27 08:22:00 crc kubenswrapper[4787]: I0127 08:22:00.582703 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-8bz6z_3c2c8464-ea72-46ca-a23d-ae23e6617ce8/extract-content/0.log" Jan 27 08:22:00 crc kubenswrapper[4787]: I0127 08:22:00.598802 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-8bz6z_3c2c8464-ea72-46ca-a23d-ae23e6617ce8/extract-utilities/0.log" Jan 27 08:22:00 crc kubenswrapper[4787]: I0127 08:22:00.604590 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-8bz6z_3c2c8464-ea72-46ca-a23d-ae23e6617ce8/extract-content/0.log" Jan 27 08:22:00 crc kubenswrapper[4787]: I0127 08:22:00.811622 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-8bz6z_3c2c8464-ea72-46ca-a23d-ae23e6617ce8/extract-utilities/0.log" Jan 27 08:22:00 crc kubenswrapper[4787]: I0127 08:22:00.827615 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-8bz6z_3c2c8464-ea72-46ca-a23d-ae23e6617ce8/extract-content/0.log" Jan 27 08:22:01 crc kubenswrapper[4787]: I0127 08:22:01.077032 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-8bz6z_3c2c8464-ea72-46ca-a23d-ae23e6617ce8/registry-server/0.log" Jan 27 08:22:10 crc kubenswrapper[4787]: I0127 08:22:10.076280 4787 scope.go:117] "RemoveContainer" containerID="125d3b0a12c5355f78fe53f0c5060ee333e1de0ab42bf300e460906f195efd1e" Jan 27 08:22:10 crc kubenswrapper[4787]: E0127 08:22:10.076997 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q4fh5_openshift-machine-config-operator(f051e184-acac-47cf-9e04-9df648288715)\"" pod="openshift-machine-config-operator/machine-config-daemon-q4fh5" podUID="f051e184-acac-47cf-9e04-9df648288715" Jan 27 08:22:22 crc kubenswrapper[4787]: I0127 08:22:22.076696 4787 scope.go:117] "RemoveContainer" containerID="125d3b0a12c5355f78fe53f0c5060ee333e1de0ab42bf300e460906f195efd1e" Jan 27 08:22:22 crc kubenswrapper[4787]: E0127 08:22:22.077464 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed 
container=machine-config-daemon pod=machine-config-daemon-q4fh5_openshift-machine-config-operator(f051e184-acac-47cf-9e04-9df648288715)\"" pod="openshift-machine-config-operator/machine-config-daemon-q4fh5" podUID="f051e184-acac-47cf-9e04-9df648288715" Jan 27 08:22:37 crc kubenswrapper[4787]: I0127 08:22:37.076759 4787 scope.go:117] "RemoveContainer" containerID="125d3b0a12c5355f78fe53f0c5060ee333e1de0ab42bf300e460906f195efd1e" Jan 27 08:22:37 crc kubenswrapper[4787]: E0127 08:22:37.077682 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q4fh5_openshift-machine-config-operator(f051e184-acac-47cf-9e04-9df648288715)\"" pod="openshift-machine-config-operator/machine-config-daemon-q4fh5" podUID="f051e184-acac-47cf-9e04-9df648288715" Jan 27 08:22:51 crc kubenswrapper[4787]: I0127 08:22:51.077043 4787 scope.go:117] "RemoveContainer" containerID="125d3b0a12c5355f78fe53f0c5060ee333e1de0ab42bf300e460906f195efd1e" Jan 27 08:22:51 crc kubenswrapper[4787]: E0127 08:22:51.077894 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q4fh5_openshift-machine-config-operator(f051e184-acac-47cf-9e04-9df648288715)\"" pod="openshift-machine-config-operator/machine-config-daemon-q4fh5" podUID="f051e184-acac-47cf-9e04-9df648288715" Jan 27 08:23:06 crc kubenswrapper[4787]: I0127 08:23:06.077132 4787 scope.go:117] "RemoveContainer" containerID="125d3b0a12c5355f78fe53f0c5060ee333e1de0ab42bf300e460906f195efd1e" Jan 27 08:23:06 crc kubenswrapper[4787]: E0127 08:23:06.078040 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q4fh5_openshift-machine-config-operator(f051e184-acac-47cf-9e04-9df648288715)\"" pod="openshift-machine-config-operator/machine-config-daemon-q4fh5" podUID="f051e184-acac-47cf-9e04-9df648288715" Jan 27 08:23:18 crc kubenswrapper[4787]: I0127 08:23:18.682414 4787 generic.go:334] "Generic (PLEG): container finished" podID="8f8dfa07-27b1-450f-9019-56d0ced6d238" containerID="f30e73fea6424d4233a5355b7aa6b2d90d5724e1970661129e89efcc148da156" exitCode=0 Jan 27 08:23:18 crc kubenswrapper[4787]: I0127 08:23:18.682514 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-zjjpz/must-gather-hldr6" event={"ID":"8f8dfa07-27b1-450f-9019-56d0ced6d238","Type":"ContainerDied","Data":"f30e73fea6424d4233a5355b7aa6b2d90d5724e1970661129e89efcc148da156"} Jan 27 08:23:18 crc kubenswrapper[4787]: I0127 08:23:18.683543 4787 scope.go:117] "RemoveContainer" containerID="f30e73fea6424d4233a5355b7aa6b2d90d5724e1970661129e89efcc148da156" Jan 27 08:23:19 crc kubenswrapper[4787]: I0127 08:23:19.372793 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-zjjpz_must-gather-hldr6_8f8dfa07-27b1-450f-9019-56d0ced6d238/gather/0.log" Jan 27 08:23:21 crc kubenswrapper[4787]: I0127 08:23:21.076929 4787 scope.go:117] "RemoveContainer" containerID="125d3b0a12c5355f78fe53f0c5060ee333e1de0ab42bf300e460906f195efd1e" Jan 27 08:23:21 crc kubenswrapper[4787]: E0127 08:23:21.077573 4787 pod_workers.go:1301] "Error syncing pod, skipping" 
err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q4fh5_openshift-machine-config-operator(f051e184-acac-47cf-9e04-9df648288715)\"" pod="openshift-machine-config-operator/machine-config-daemon-q4fh5" podUID="f051e184-acac-47cf-9e04-9df648288715" Jan 27 08:23:26 crc kubenswrapper[4787]: I0127 08:23:26.624267 4787 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-zjjpz/must-gather-hldr6"] Jan 27 08:23:26 crc kubenswrapper[4787]: I0127 08:23:26.625206 4787 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-must-gather-zjjpz/must-gather-hldr6" podUID="8f8dfa07-27b1-450f-9019-56d0ced6d238" containerName="copy" containerID="cri-o://387fee2205dae8bd6763255730dec63813d6d41dc01c06d500efe409bc6e4380" gracePeriod=2 Jan 27 08:23:26 crc kubenswrapper[4787]: I0127 08:23:26.631101 4787 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-zjjpz/must-gather-hldr6"] Jan 27 08:23:27 crc kubenswrapper[4787]: I0127 08:23:27.048297 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-zjjpz_must-gather-hldr6_8f8dfa07-27b1-450f-9019-56d0ced6d238/copy/0.log" Jan 27 08:23:27 crc kubenswrapper[4787]: I0127 08:23:27.049184 4787 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-zjjpz/must-gather-hldr6" Jan 27 08:23:27 crc kubenswrapper[4787]: I0127 08:23:27.164509 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lrwnk\" (UniqueName: \"kubernetes.io/projected/8f8dfa07-27b1-450f-9019-56d0ced6d238-kube-api-access-lrwnk\") pod \"8f8dfa07-27b1-450f-9019-56d0ced6d238\" (UID: \"8f8dfa07-27b1-450f-9019-56d0ced6d238\") " Jan 27 08:23:27 crc kubenswrapper[4787]: I0127 08:23:27.164576 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/8f8dfa07-27b1-450f-9019-56d0ced6d238-must-gather-output\") pod \"8f8dfa07-27b1-450f-9019-56d0ced6d238\" (UID: \"8f8dfa07-27b1-450f-9019-56d0ced6d238\") " Jan 27 08:23:27 crc kubenswrapper[4787]: I0127 08:23:27.169962 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f8dfa07-27b1-450f-9019-56d0ced6d238-kube-api-access-lrwnk" (OuterVolumeSpecName: "kube-api-access-lrwnk") pod "8f8dfa07-27b1-450f-9019-56d0ced6d238" (UID: "8f8dfa07-27b1-450f-9019-56d0ced6d238"). InnerVolumeSpecName "kube-api-access-lrwnk". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 27 08:23:27 crc kubenswrapper[4787]: I0127 08:23:27.257934 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8f8dfa07-27b1-450f-9019-56d0ced6d238-must-gather-output" (OuterVolumeSpecName: "must-gather-output") pod "8f8dfa07-27b1-450f-9019-56d0ced6d238" (UID: "8f8dfa07-27b1-450f-9019-56d0ced6d238"). InnerVolumeSpecName "must-gather-output". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 27 08:23:27 crc kubenswrapper[4787]: I0127 08:23:27.266953 4787 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lrwnk\" (UniqueName: \"kubernetes.io/projected/8f8dfa07-27b1-450f-9019-56d0ced6d238-kube-api-access-lrwnk\") on node \"crc\" DevicePath \"\"" Jan 27 08:23:27 crc kubenswrapper[4787]: I0127 08:23:27.266986 4787 reconciler_common.go:293] "Volume detached for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/8f8dfa07-27b1-450f-9019-56d0ced6d238-must-gather-output\") on node \"crc\" DevicePath \"\"" Jan 27 08:23:27 crc kubenswrapper[4787]: I0127 08:23:27.747605 4787 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-zjjpz_must-gather-hldr6_8f8dfa07-27b1-450f-9019-56d0ced6d238/copy/0.log" Jan 27 08:23:27 crc kubenswrapper[4787]: I0127 08:23:27.748474 4787 generic.go:334] "Generic (PLEG): container finished" podID="8f8dfa07-27b1-450f-9019-56d0ced6d238" containerID="387fee2205dae8bd6763255730dec63813d6d41dc01c06d500efe409bc6e4380" exitCode=143 Jan 27 08:23:27 crc kubenswrapper[4787]: I0127 08:23:27.748538 4787 scope.go:117] "RemoveContainer" containerID="387fee2205dae8bd6763255730dec63813d6d41dc01c06d500efe409bc6e4380" Jan 27 08:23:27 crc kubenswrapper[4787]: I0127 08:23:27.748582 4787 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-zjjpz/must-gather-hldr6" Jan 27 08:23:27 crc kubenswrapper[4787]: I0127 08:23:27.787766 4787 scope.go:117] "RemoveContainer" containerID="f30e73fea6424d4233a5355b7aa6b2d90d5724e1970661129e89efcc148da156" Jan 27 08:23:27 crc kubenswrapper[4787]: I0127 08:23:27.839125 4787 scope.go:117] "RemoveContainer" containerID="387fee2205dae8bd6763255730dec63813d6d41dc01c06d500efe409bc6e4380" Jan 27 08:23:27 crc kubenswrapper[4787]: E0127 08:23:27.839630 4787 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"387fee2205dae8bd6763255730dec63813d6d41dc01c06d500efe409bc6e4380\": container with ID starting with 387fee2205dae8bd6763255730dec63813d6d41dc01c06d500efe409bc6e4380 not found: ID does not exist" containerID="387fee2205dae8bd6763255730dec63813d6d41dc01c06d500efe409bc6e4380" Jan 27 08:23:27 crc kubenswrapper[4787]: I0127 08:23:27.839656 4787 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"387fee2205dae8bd6763255730dec63813d6d41dc01c06d500efe409bc6e4380"} err="failed to get container status \"387fee2205dae8bd6763255730dec63813d6d41dc01c06d500efe409bc6e4380\": rpc error: code = NotFound desc = could not find container \"387fee2205dae8bd6763255730dec63813d6d41dc01c06d500efe409bc6e4380\": container with ID starting with 387fee2205dae8bd6763255730dec63813d6d41dc01c06d500efe409bc6e4380 not found: ID does not exist" Jan 27 08:23:27 crc kubenswrapper[4787]: I0127 08:23:27.839678 4787 scope.go:117] "RemoveContainer" containerID="f30e73fea6424d4233a5355b7aa6b2d90d5724e1970661129e89efcc148da156" Jan 27 08:23:27 crc kubenswrapper[4787]: E0127 08:23:27.839921 4787 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f30e73fea6424d4233a5355b7aa6b2d90d5724e1970661129e89efcc148da156\": container with ID starting with f30e73fea6424d4233a5355b7aa6b2d90d5724e1970661129e89efcc148da156 not found: ID does not exist" containerID="f30e73fea6424d4233a5355b7aa6b2d90d5724e1970661129e89efcc148da156" Jan 27 08:23:27 crc 
kubenswrapper[4787]: I0127 08:23:27.839940 4787 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f30e73fea6424d4233a5355b7aa6b2d90d5724e1970661129e89efcc148da156"} err="failed to get container status \"f30e73fea6424d4233a5355b7aa6b2d90d5724e1970661129e89efcc148da156\": rpc error: code = NotFound desc = could not find container \"f30e73fea6424d4233a5355b7aa6b2d90d5724e1970661129e89efcc148da156\": container with ID starting with f30e73fea6424d4233a5355b7aa6b2d90d5724e1970661129e89efcc148da156 not found: ID does not exist" Jan 27 08:23:29 crc kubenswrapper[4787]: I0127 08:23:29.085360 4787 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8f8dfa07-27b1-450f-9019-56d0ced6d238" path="/var/lib/kubelet/pods/8f8dfa07-27b1-450f-9019-56d0ced6d238/volumes" Jan 27 08:23:33 crc kubenswrapper[4787]: I0127 08:23:33.077110 4787 scope.go:117] "RemoveContainer" containerID="125d3b0a12c5355f78fe53f0c5060ee333e1de0ab42bf300e460906f195efd1e" Jan 27 08:23:33 crc kubenswrapper[4787]: E0127 08:23:33.077860 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q4fh5_openshift-machine-config-operator(f051e184-acac-47cf-9e04-9df648288715)\"" pod="openshift-machine-config-operator/machine-config-daemon-q4fh5" podUID="f051e184-acac-47cf-9e04-9df648288715" Jan 27 08:23:48 crc kubenswrapper[4787]: I0127 08:23:48.081048 4787 scope.go:117] "RemoveContainer" containerID="125d3b0a12c5355f78fe53f0c5060ee333e1de0ab42bf300e460906f195efd1e" Jan 27 08:23:48 crc kubenswrapper[4787]: E0127 08:23:48.082150 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q4fh5_openshift-machine-config-operator(f051e184-acac-47cf-9e04-9df648288715)\"" pod="openshift-machine-config-operator/machine-config-daemon-q4fh5" podUID="f051e184-acac-47cf-9e04-9df648288715" Jan 27 08:23:59 crc kubenswrapper[4787]: I0127 08:23:59.077370 4787 scope.go:117] "RemoveContainer" containerID="125d3b0a12c5355f78fe53f0c5060ee333e1de0ab42bf300e460906f195efd1e" Jan 27 08:23:59 crc kubenswrapper[4787]: E0127 08:23:59.078282 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q4fh5_openshift-machine-config-operator(f051e184-acac-47cf-9e04-9df648288715)\"" pod="openshift-machine-config-operator/machine-config-daemon-q4fh5" podUID="f051e184-acac-47cf-9e04-9df648288715" Jan 27 08:24:13 crc kubenswrapper[4787]: I0127 08:24:13.076685 4787 scope.go:117] "RemoveContainer" containerID="125d3b0a12c5355f78fe53f0c5060ee333e1de0ab42bf300e460906f195efd1e" Jan 27 08:24:13 crc kubenswrapper[4787]: E0127 08:24:13.077646 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q4fh5_openshift-machine-config-operator(f051e184-acac-47cf-9e04-9df648288715)\"" pod="openshift-machine-config-operator/machine-config-daemon-q4fh5" podUID="f051e184-acac-47cf-9e04-9df648288715" Jan 27 08:24:25 crc kubenswrapper[4787]: 
I0127 08:24:25.573356 4787 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-5h5v6"] Jan 27 08:24:25 crc kubenswrapper[4787]: E0127 08:24:25.574307 4787 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8f8dfa07-27b1-450f-9019-56d0ced6d238" containerName="copy" Jan 27 08:24:25 crc kubenswrapper[4787]: I0127 08:24:25.574326 4787 state_mem.go:107] "Deleted CPUSet assignment" podUID="8f8dfa07-27b1-450f-9019-56d0ced6d238" containerName="copy" Jan 27 08:24:25 crc kubenswrapper[4787]: E0127 08:24:25.574348 4787 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8f8dfa07-27b1-450f-9019-56d0ced6d238" containerName="gather" Jan 27 08:24:25 crc kubenswrapper[4787]: I0127 08:24:25.574357 4787 state_mem.go:107] "Deleted CPUSet assignment" podUID="8f8dfa07-27b1-450f-9019-56d0ced6d238" containerName="gather" Jan 27 08:24:25 crc kubenswrapper[4787]: I0127 08:24:25.574566 4787 memory_manager.go:354] "RemoveStaleState removing state" podUID="8f8dfa07-27b1-450f-9019-56d0ced6d238" containerName="gather" Jan 27 08:24:25 crc kubenswrapper[4787]: I0127 08:24:25.574597 4787 memory_manager.go:354] "RemoveStaleState removing state" podUID="8f8dfa07-27b1-450f-9019-56d0ced6d238" containerName="copy" Jan 27 08:24:25 crc kubenswrapper[4787]: I0127 08:24:25.575663 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-5h5v6" Jan 27 08:24:25 crc kubenswrapper[4787]: I0127 08:24:25.597957 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-5h5v6"] Jan 27 08:24:25 crc kubenswrapper[4787]: I0127 08:24:25.643458 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5d52ceef-4485-4ef8-a928-e674645895d7-catalog-content\") pod \"redhat-operators-5h5v6\" (UID: \"5d52ceef-4485-4ef8-a928-e674645895d7\") " pod="openshift-marketplace/redhat-operators-5h5v6" Jan 27 08:24:25 crc kubenswrapper[4787]: I0127 08:24:25.643512 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cwhvk\" (UniqueName: \"kubernetes.io/projected/5d52ceef-4485-4ef8-a928-e674645895d7-kube-api-access-cwhvk\") pod \"redhat-operators-5h5v6\" (UID: \"5d52ceef-4485-4ef8-a928-e674645895d7\") " pod="openshift-marketplace/redhat-operators-5h5v6" Jan 27 08:24:25 crc kubenswrapper[4787]: I0127 08:24:25.643578 4787 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5d52ceef-4485-4ef8-a928-e674645895d7-utilities\") pod \"redhat-operators-5h5v6\" (UID: \"5d52ceef-4485-4ef8-a928-e674645895d7\") " pod="openshift-marketplace/redhat-operators-5h5v6" Jan 27 08:24:25 crc kubenswrapper[4787]: I0127 08:24:25.745424 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5d52ceef-4485-4ef8-a928-e674645895d7-catalog-content\") pod \"redhat-operators-5h5v6\" (UID: \"5d52ceef-4485-4ef8-a928-e674645895d7\") " pod="openshift-marketplace/redhat-operators-5h5v6" Jan 27 08:24:25 crc kubenswrapper[4787]: I0127 08:24:25.745826 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cwhvk\" (UniqueName: \"kubernetes.io/projected/5d52ceef-4485-4ef8-a928-e674645895d7-kube-api-access-cwhvk\") pod \"redhat-operators-5h5v6\" (UID: 
\"5d52ceef-4485-4ef8-a928-e674645895d7\") " pod="openshift-marketplace/redhat-operators-5h5v6" Jan 27 08:24:25 crc kubenswrapper[4787]: I0127 08:24:25.745866 4787 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5d52ceef-4485-4ef8-a928-e674645895d7-utilities\") pod \"redhat-operators-5h5v6\" (UID: \"5d52ceef-4485-4ef8-a928-e674645895d7\") " pod="openshift-marketplace/redhat-operators-5h5v6" Jan 27 08:24:25 crc kubenswrapper[4787]: I0127 08:24:25.746074 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5d52ceef-4485-4ef8-a928-e674645895d7-catalog-content\") pod \"redhat-operators-5h5v6\" (UID: \"5d52ceef-4485-4ef8-a928-e674645895d7\") " pod="openshift-marketplace/redhat-operators-5h5v6" Jan 27 08:24:25 crc kubenswrapper[4787]: I0127 08:24:25.746122 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5d52ceef-4485-4ef8-a928-e674645895d7-utilities\") pod \"redhat-operators-5h5v6\" (UID: \"5d52ceef-4485-4ef8-a928-e674645895d7\") " pod="openshift-marketplace/redhat-operators-5h5v6" Jan 27 08:24:25 crc kubenswrapper[4787]: I0127 08:24:25.770635 4787 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cwhvk\" (UniqueName: \"kubernetes.io/projected/5d52ceef-4485-4ef8-a928-e674645895d7-kube-api-access-cwhvk\") pod \"redhat-operators-5h5v6\" (UID: \"5d52ceef-4485-4ef8-a928-e674645895d7\") " pod="openshift-marketplace/redhat-operators-5h5v6" Jan 27 08:24:25 crc kubenswrapper[4787]: I0127 08:24:25.903913 4787 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-5h5v6" Jan 27 08:24:26 crc kubenswrapper[4787]: I0127 08:24:26.076108 4787 scope.go:117] "RemoveContainer" containerID="125d3b0a12c5355f78fe53f0c5060ee333e1de0ab42bf300e460906f195efd1e" Jan 27 08:24:26 crc kubenswrapper[4787]: E0127 08:24:26.076717 4787 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q4fh5_openshift-machine-config-operator(f051e184-acac-47cf-9e04-9df648288715)\"" pod="openshift-machine-config-operator/machine-config-daemon-q4fh5" podUID="f051e184-acac-47cf-9e04-9df648288715" Jan 27 08:24:26 crc kubenswrapper[4787]: I0127 08:24:26.393136 4787 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-5h5v6"] Jan 27 08:24:27 crc kubenswrapper[4787]: I0127 08:24:27.187578 4787 generic.go:334] "Generic (PLEG): container finished" podID="5d52ceef-4485-4ef8-a928-e674645895d7" containerID="1dad88948101345937304f02d97549de9181a706153003bd851bda10eadc04f8" exitCode=0 Jan 27 08:24:27 crc kubenswrapper[4787]: I0127 08:24:27.187608 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5h5v6" event={"ID":"5d52ceef-4485-4ef8-a928-e674645895d7","Type":"ContainerDied","Data":"1dad88948101345937304f02d97549de9181a706153003bd851bda10eadc04f8"} Jan 27 08:24:27 crc kubenswrapper[4787]: I0127 08:24:27.187917 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5h5v6" event={"ID":"5d52ceef-4485-4ef8-a928-e674645895d7","Type":"ContainerStarted","Data":"4eac58c9bc7e715496982aafc25fdbce3075831ece2d9aae7f17ee70e9365f45"} Jan 
27 08:24:27 crc kubenswrapper[4787]: I0127 08:24:27.188968 4787 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Jan 27 08:24:28 crc kubenswrapper[4787]: I0127 08:24:28.194874 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5h5v6" event={"ID":"5d52ceef-4485-4ef8-a928-e674645895d7","Type":"ContainerStarted","Data":"8891d4a222ee724f541e25eed3f2d449b6f1f186a201bb270d2404d4a73ed22f"} Jan 27 08:24:29 crc kubenswrapper[4787]: I0127 08:24:29.204598 4787 generic.go:334] "Generic (PLEG): container finished" podID="5d52ceef-4485-4ef8-a928-e674645895d7" containerID="8891d4a222ee724f541e25eed3f2d449b6f1f186a201bb270d2404d4a73ed22f" exitCode=0 Jan 27 08:24:29 crc kubenswrapper[4787]: I0127 08:24:29.204672 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5h5v6" event={"ID":"5d52ceef-4485-4ef8-a928-e674645895d7","Type":"ContainerDied","Data":"8891d4a222ee724f541e25eed3f2d449b6f1f186a201bb270d2404d4a73ed22f"} Jan 27 08:24:30 crc kubenswrapper[4787]: I0127 08:24:30.214394 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5h5v6" event={"ID":"5d52ceef-4485-4ef8-a928-e674645895d7","Type":"ContainerStarted","Data":"cfec861a461574c11bd800dda12c846a44c5492289758881603832f829ad865b"} Jan 27 08:24:30 crc kubenswrapper[4787]: I0127 08:24:30.232508 4787 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-5h5v6" podStartSLOduration=2.791434947 podStartE2EDuration="5.232485844s" podCreationTimestamp="2026-01-27 08:24:25 +0000 UTC" firstStartedPulling="2026-01-27 08:24:27.188790958 +0000 UTC m=+1972.841146450" lastFinishedPulling="2026-01-27 08:24:29.629841855 +0000 UTC m=+1975.282197347" observedRunningTime="2026-01-27 08:24:30.231405239 +0000 UTC m=+1975.883760741" watchObservedRunningTime="2026-01-27 08:24:30.232485844 +0000 UTC m=+1975.884841366" Jan 27 08:24:35 crc kubenswrapper[4787]: I0127 08:24:35.904827 4787 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-5h5v6" Jan 27 08:24:35 crc kubenswrapper[4787]: I0127 08:24:35.905567 4787 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-5h5v6" Jan 27 08:24:35 crc kubenswrapper[4787]: I0127 08:24:35.944089 4787 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-5h5v6" Jan 27 08:24:36 crc kubenswrapper[4787]: I0127 08:24:36.295844 4787 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-5h5v6" Jan 27 08:24:36 crc kubenswrapper[4787]: I0127 08:24:36.349267 4787 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-5h5v6"] Jan 27 08:24:38 crc kubenswrapper[4787]: I0127 08:24:38.268719 4787 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-5h5v6" podUID="5d52ceef-4485-4ef8-a928-e674645895d7" containerName="registry-server" containerID="cri-o://cfec861a461574c11bd800dda12c846a44c5492289758881603832f829ad865b" gracePeriod=2 Jan 27 08:24:40 crc kubenswrapper[4787]: I0127 08:24:40.076967 4787 scope.go:117] "RemoveContainer" containerID="125d3b0a12c5355f78fe53f0c5060ee333e1de0ab42bf300e460906f195efd1e" Jan 27 08:24:40 crc kubenswrapper[4787]: E0127 08:24:40.077519 4787 
pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q4fh5_openshift-machine-config-operator(f051e184-acac-47cf-9e04-9df648288715)\"" pod="openshift-machine-config-operator/machine-config-daemon-q4fh5" podUID="f051e184-acac-47cf-9e04-9df648288715" Jan 27 08:24:40 crc kubenswrapper[4787]: I0127 08:24:40.296322 4787 generic.go:334] "Generic (PLEG): container finished" podID="5d52ceef-4485-4ef8-a928-e674645895d7" containerID="cfec861a461574c11bd800dda12c846a44c5492289758881603832f829ad865b" exitCode=0 Jan 27 08:24:40 crc kubenswrapper[4787]: I0127 08:24:40.296380 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5h5v6" event={"ID":"5d52ceef-4485-4ef8-a928-e674645895d7","Type":"ContainerDied","Data":"cfec861a461574c11bd800dda12c846a44c5492289758881603832f829ad865b"} Jan 27 08:24:40 crc kubenswrapper[4787]: I0127 08:24:40.649951 4787 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-5h5v6" Jan 27 08:24:40 crc kubenswrapper[4787]: I0127 08:24:40.658466 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5d52ceef-4485-4ef8-a928-e674645895d7-utilities\") pod \"5d52ceef-4485-4ef8-a928-e674645895d7\" (UID: \"5d52ceef-4485-4ef8-a928-e674645895d7\") " Jan 27 08:24:40 crc kubenswrapper[4787]: I0127 08:24:40.658514 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cwhvk\" (UniqueName: \"kubernetes.io/projected/5d52ceef-4485-4ef8-a928-e674645895d7-kube-api-access-cwhvk\") pod \"5d52ceef-4485-4ef8-a928-e674645895d7\" (UID: \"5d52ceef-4485-4ef8-a928-e674645895d7\") " Jan 27 08:24:40 crc kubenswrapper[4787]: I0127 08:24:40.658659 4787 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5d52ceef-4485-4ef8-a928-e674645895d7-catalog-content\") pod \"5d52ceef-4485-4ef8-a928-e674645895d7\" (UID: \"5d52ceef-4485-4ef8-a928-e674645895d7\") " Jan 27 08:24:40 crc kubenswrapper[4787]: I0127 08:24:40.659275 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5d52ceef-4485-4ef8-a928-e674645895d7-utilities" (OuterVolumeSpecName: "utilities") pod "5d52ceef-4485-4ef8-a928-e674645895d7" (UID: "5d52ceef-4485-4ef8-a928-e674645895d7"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 27 08:24:40 crc kubenswrapper[4787]: I0127 08:24:40.669816 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5d52ceef-4485-4ef8-a928-e674645895d7-kube-api-access-cwhvk" (OuterVolumeSpecName: "kube-api-access-cwhvk") pod "5d52ceef-4485-4ef8-a928-e674645895d7" (UID: "5d52ceef-4485-4ef8-a928-e674645895d7"). InnerVolumeSpecName "kube-api-access-cwhvk". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 27 08:24:40 crc kubenswrapper[4787]: I0127 08:24:40.760140 4787 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5d52ceef-4485-4ef8-a928-e674645895d7-utilities\") on node \"crc\" DevicePath \"\"" Jan 27 08:24:40 crc kubenswrapper[4787]: I0127 08:24:40.760365 4787 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cwhvk\" (UniqueName: \"kubernetes.io/projected/5d52ceef-4485-4ef8-a928-e674645895d7-kube-api-access-cwhvk\") on node \"crc\" DevicePath \"\"" Jan 27 08:24:40 crc kubenswrapper[4787]: I0127 08:24:40.775138 4787 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5d52ceef-4485-4ef8-a928-e674645895d7-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5d52ceef-4485-4ef8-a928-e674645895d7" (UID: "5d52ceef-4485-4ef8-a928-e674645895d7"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 27 08:24:40 crc kubenswrapper[4787]: I0127 08:24:40.861725 4787 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5d52ceef-4485-4ef8-a928-e674645895d7-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 27 08:24:41 crc kubenswrapper[4787]: I0127 08:24:41.304282 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5h5v6" event={"ID":"5d52ceef-4485-4ef8-a928-e674645895d7","Type":"ContainerDied","Data":"4eac58c9bc7e715496982aafc25fdbce3075831ece2d9aae7f17ee70e9365f45"} Jan 27 08:24:41 crc kubenswrapper[4787]: I0127 08:24:41.304572 4787 scope.go:117] "RemoveContainer" containerID="cfec861a461574c11bd800dda12c846a44c5492289758881603832f829ad865b" Jan 27 08:24:41 crc kubenswrapper[4787]: I0127 08:24:41.304616 4787 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-5h5v6" Jan 27 08:24:41 crc kubenswrapper[4787]: I0127 08:24:41.335773 4787 scope.go:117] "RemoveContainer" containerID="8891d4a222ee724f541e25eed3f2d449b6f1f186a201bb270d2404d4a73ed22f" Jan 27 08:24:41 crc kubenswrapper[4787]: I0127 08:24:41.344639 4787 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-5h5v6"] Jan 27 08:24:41 crc kubenswrapper[4787]: I0127 08:24:41.346311 4787 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-5h5v6"] Jan 27 08:24:41 crc kubenswrapper[4787]: I0127 08:24:41.409859 4787 scope.go:117] "RemoveContainer" containerID="1dad88948101345937304f02d97549de9181a706153003bd851bda10eadc04f8" Jan 27 08:24:43 crc kubenswrapper[4787]: I0127 08:24:43.085298 4787 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5d52ceef-4485-4ef8-a928-e674645895d7" path="/var/lib/kubelet/pods/5d52ceef-4485-4ef8-a928-e674645895d7/volumes" Jan 27 08:24:55 crc kubenswrapper[4787]: I0127 08:24:55.080908 4787 scope.go:117] "RemoveContainer" containerID="125d3b0a12c5355f78fe53f0c5060ee333e1de0ab42bf300e460906f195efd1e" Jan 27 08:24:55 crc kubenswrapper[4787]: I0127 08:24:55.417922 4787 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-q4fh5" event={"ID":"f051e184-acac-47cf-9e04-9df648288715","Type":"ContainerStarted","Data":"fddd18f1ff4cbc977bd28c15a1789e1044c68d26be8f818f7c210dbd846d2077"}
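The long run of machine-config-daemon records above is kubelet's restart back-off at work: the container sits inside a "back-off 5m0s" window, so each periodic pod sync is rejected with "Error syncing pod, skipping" (roughly every 11-15 seconds from 08:21:32 onward), until at 08:24:55 the RemoveContainer call goes through without an error and a new container starts (the ContainerStarted record that closes this excerpt). A minimal sketch of that capped, doubling back-off shape follows; it is an illustration only, not kubelet source, and the 10s initial delay is an assumption made for the example (only the 5m0s cap appears in the records).

```go
// backoff_sketch.go — illustrative shape of a capped, doubling restart back-off,
// the pattern behind the repeated "back-off 5m0s restarting failed container"
// records above. Not kubelet code; the 10s starting delay is assumed.
package main

import (
	"fmt"
	"time"
)

func main() {
	const (
		initialDelay = 10 * time.Second // assumed first back-off
		maxDelay     = 5 * time.Minute  // "back-off 5m0s" in the log
	)
	delay := initialDelay
	for attempt := 1; attempt <= 8; attempt++ {
		fmt.Printf("failed attempt %d: next restart allowed after %s\n", attempt, delay)
		delay *= 2
		if delay > maxDelay {
			delay = maxDelay // later failures all wait the full cap
		}
	}
}
```

Under these assumptions the delay climbs 10s, 20s, 40s, 1m20s, 2m40s and then stays at 5m0s, which is why the same error text repeats until the five-minute window has elapsed.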
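The "Observed pod startup duration" record for redhat-operators-5h5v6 is internally consistent and worth decoding: podStartE2EDuration is watchObservedRunningTime minus podCreationTimestamp (08:24:30.232485844 - 08:24:25 = 5.232485844s), and podStartSLOduration subtracts the image-pull window, lastFinishedPulling - firstStartedPulling = 2.441050897s, leaving 2.791434947s, exactly the two values logged. The short check below redoes that arithmetic from the timestamps as printed (the monotonic `m=+...` suffixes are dropped).

```go
// startup_duration_check.go — re-derives the two durations in the
// "Observed pod startup duration" record for redhat-operators-5h5v6.
package main

import (
	"fmt"
	"time"
)

// Layout matching the Go time.Time string form used in the record;
// fractional seconds are accepted automatically when parsing.
const layout = "2006-01-02 15:04:05 -0700 MST"

func mustParse(v string) time.Time {
	t, err := time.Parse(layout, v)
	if err != nil {
		panic(err)
	}
	return t
}

func main() {
	created := mustParse("2026-01-27 08:24:25 +0000 UTC")             // podCreationTimestamp
	firstPull := mustParse("2026-01-27 08:24:27.188790958 +0000 UTC") // firstStartedPulling
	lastPull := mustParse("2026-01-27 08:24:29.629841855 +0000 UTC")  // lastFinishedPulling
	observed := mustParse("2026-01-27 08:24:30.232485844 +0000 UTC")  // watchObservedRunningTime

	e2e := observed.Sub(created)         // 5.232485844s = podStartE2EDuration
	slo := e2e - lastPull.Sub(firstPull) // 2.791434947s = podStartSLOduration
	fmt.Println("E2E:", e2e, "SLO:", slo)
}
```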
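Most of the volume in this excerpt is the recurring `log.go:25] "Finished parsing log file"` record, one per container log under /var/log/pods/<namespace>_<pod>_<uid>/<container>/<restart>.log. For summarising which pods and containers those records cover, the sketch below pulls the fields out of a saved excerpt or a live journal stream; the regular expression and the stdin-based invocation are choices made for this example, not anything kubelet itself provides.

```go
// podlog_paths.go — lists the namespace/pod/container behind each
// "Finished parsing log file" record, reading journal text from stdin.
// The path layout /var/log/pods/<ns>_<pod>_<uid>/<container>/<restart>.log
// is taken from the records above; everything else here is illustrative.
package main

import (
	"bufio"
	"fmt"
	"os"
	"regexp"
)

var podLogPath = regexp.MustCompile(
	`"Finished parsing log file" path="/var/log/pods/([^_"]+)_([^_"]+)_([^/"]+)/([^/"]+)/(\d+)\.log"`)

func main() {
	sc := bufio.NewScanner(os.Stdin)
	sc.Buffer(make([]byte, 0, 1024*1024), 1024*1024) // pasted journal lines can be very long
	seen := map[string]int{}
	for sc.Scan() {
		for _, m := range podLogPath.FindAllStringSubmatch(sc.Text(), -1) {
			key := fmt.Sprintf("%s/%s container=%s restart=%s", m[1], m[2], m[4], m[5])
			seen[key]++
		}
	}
	if err := sc.Err(); err != nil {
		fmt.Fprintln(os.Stderr, "read error:", err)
		os.Exit(1)
	}
	for key, n := range seen {
		fmt.Printf("%4d  %s\n", n, key)
	}
}
```

Typical use, assuming the kubelet runs as the systemd unit shown at the start of this journal: `journalctl -u kubelet --no-pager | go run podlog_paths.go`; piping in a saved excerpt works the same way.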